@langchain/openai
OpenAI integrations for LangChain.js
1 line • 38.4 kB
Source Map (JSON)
{"version":3,"file":"completions.cjs","names":["completionsApiContentBlockConverter: StandardContentBlockConverter<{\n text: ChatCompletionContentPartText;\n image: ChatCompletionContentPartImage;\n audio: ChatCompletionContentPartInputAudio;\n file: ChatCompletionContentPart.File;\n}>","mimeType: { type: string; subtype: string }","getRequiredFilenameFromMetadata","convertCompletionsMessageToBaseMessage: Converter<\n {\n message: OpenAIClient.Chat.Completions.ChatCompletionMessage;\n rawResponse: OpenAIClient.Chat.Completions.ChatCompletion;\n includeRawResponse?: boolean;\n },\n BaseMessage\n>","rawToolCalls: OpenAIToolCall[] | undefined","e: any","additional_kwargs: Record<string, unknown>","response_metadata: Record<string, unknown> | undefined","handleMultiModalOutput","AIMessage","ChatMessage","convertCompletionsDeltaToBaseMessageChunk: Converter<\n {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n delta: Record<string, any>;\n rawResponse: OpenAIClient.Chat.Completions.ChatCompletionChunk;\n includeRawResponse?: boolean;\n defaultRole?: OpenAIClient.Chat.ChatCompletionRole;\n },\n BaseMessageChunk\n>","HumanMessageChunk","toolCallChunks: ToolCallChunk[]","AIMessageChunk","SystemMessageChunk","FunctionMessageChunk","ToolMessageChunk","ChatMessageChunk","convertStandardContentBlockToCompletionsContentPart: Converter<\n ContentBlock.Standard,\n | OpenAIClient.Chat.Completions.ChatCompletionContentPartImage\n | OpenAIClient.Chat.Completions.ChatCompletionContentPartInputAudio\n | OpenAIClient.Chat.Completions.ChatCompletionContentPart.File\n | undefined\n>","format","convertStandardContentMessageToCompletionsMessage: Converter<\n { message: BaseMessage; model?: string },\n OpenAIClient.Chat.Completions.ChatCompletionMessageParam\n>","messageToOpenAIRole","isReasoningModel","ToolMessage","blocks: ContentBlock.Standard[]","convertMessagesToCompletionsMessageParams: Converter<\n { messages: BaseMessage[]; model?: string },\n OpenAIClient.Chat.Completions.ChatCompletionMessageParam[]\n>","completionParam: Record<string, any>","convertLangChainToolCallToOpenAI"],"sources":["../../src/converters/completions.ts"],"sourcesContent":["import {\n AIMessage,\n AIMessageChunk,\n BaseMessage,\n BaseMessageChunk,\n ChatMessage,\n ChatMessageChunk,\n FunctionMessageChunk,\n HumanMessageChunk,\n OpenAIToolCall,\n SystemMessageChunk,\n ToolCallChunk,\n ToolMessage,\n ToolMessageChunk,\n parseBase64DataUrl,\n parseMimeType,\n StandardContentBlockConverter,\n isDataContentBlock,\n ContentBlock,\n iife,\n convertToProviderContentBlock,\n} from \"@langchain/core/messages\";\nimport {\n convertLangChainToolCallToOpenAI,\n makeInvalidToolCall,\n parseToolCall,\n} from \"@langchain/core/output_parsers/openai_tools\";\nimport { Converter } from \"@langchain/core/utils/format\";\nimport type {\n ChatCompletionContentPartText,\n ChatCompletionContentPartImage,\n ChatCompletionContentPartInputAudio,\n ChatCompletionContentPart,\n} from \"openai/resources/chat/completions\";\nimport { OpenAI as OpenAIClient } from \"openai\";\nimport { handleMultiModalOutput } from \"../utils/output.js\";\nimport {\n getRequiredFilenameFromMetadata,\n isReasoningModel,\n messageToOpenAIRole,\n} from \"../utils/misc.js\";\n\n/**\n * @deprecated This converter is an internal detail of the OpenAI provider. Do not use it directly. 
This will be revisited in a future release.\n */\nexport const completionsApiContentBlockConverter: StandardContentBlockConverter<{\n text: ChatCompletionContentPartText;\n image: ChatCompletionContentPartImage;\n audio: ChatCompletionContentPartInputAudio;\n file: ChatCompletionContentPart.File;\n}> = {\n providerName: \"ChatOpenAI\",\n\n fromStandardTextBlock(block): ChatCompletionContentPartText {\n return { type: \"text\", text: block.text };\n },\n\n fromStandardImageBlock(block): ChatCompletionContentPartImage {\n if (block.source_type === \"url\") {\n return {\n type: \"image_url\",\n image_url: {\n url: block.url,\n ...(block.metadata?.detail\n ? { detail: block.metadata.detail as \"auto\" | \"low\" | \"high\" }\n : {}),\n },\n };\n }\n\n if (block.source_type === \"base64\") {\n const url = `data:${block.mime_type ?? \"\"};base64,${block.data}`;\n return {\n type: \"image_url\",\n image_url: {\n url,\n ...(block.metadata?.detail\n ? { detail: block.metadata.detail as \"auto\" | \"low\" | \"high\" }\n : {}),\n },\n };\n }\n\n throw new Error(\n `Image content blocks with source_type ${block.source_type} are not supported for ChatOpenAI`\n );\n },\n\n fromStandardAudioBlock(block): ChatCompletionContentPartInputAudio {\n if (block.source_type === \"url\") {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (!data) {\n throw new Error(\n `URL audio blocks with source_type ${block.source_type} must be formatted as a data URL for ChatOpenAI`\n );\n }\n\n const rawMimeType = data.mime_type || block.mime_type || \"\";\n let mimeType: { type: string; subtype: string };\n\n try {\n mimeType = parseMimeType(rawMimeType);\n } catch {\n throw new Error(\n `Audio blocks with source_type ${block.source_type} must have mime type of audio/wav or audio/mp3`\n );\n }\n\n if (\n mimeType.type !== \"audio\" ||\n (mimeType.subtype !== \"wav\" && mimeType.subtype !== \"mp3\")\n ) {\n throw new Error(\n `Audio blocks with source_type ${block.source_type} must have mime type of audio/wav or audio/mp3`\n );\n }\n\n return {\n type: \"input_audio\",\n input_audio: {\n format: mimeType.subtype,\n data: data.data,\n },\n };\n }\n\n if (block.source_type === \"base64\") {\n let mimeType: { type: string; subtype: string };\n\n try {\n mimeType = parseMimeType(block.mime_type ?? \"\");\n } catch {\n throw new Error(\n `Audio blocks with source_type ${block.source_type} must have mime type of audio/wav or audio/mp3`\n );\n }\n\n if (\n mimeType.type !== \"audio\" ||\n (mimeType.subtype !== \"wav\" && mimeType.subtype !== \"mp3\")\n ) {\n throw new Error(\n `Audio blocks with source_type ${block.source_type} must have mime type of audio/wav or audio/mp3`\n );\n }\n\n return {\n type: \"input_audio\",\n input_audio: {\n format: mimeType.subtype,\n data: block.data,\n },\n };\n }\n\n throw new Error(\n `Audio content blocks with source_type ${block.source_type} are not supported for ChatOpenAI`\n );\n },\n\n fromStandardFileBlock(block): ChatCompletionContentPart.File {\n if (block.source_type === \"url\") {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n\n const filename = getRequiredFilenameFromMetadata(block);\n\n if (!data) {\n throw new Error(\n `URL file blocks with source_type ${block.source_type} must be formatted as a data URL for ChatOpenAI`\n );\n }\n\n return {\n type: \"file\",\n file: {\n file_data: block.url, // formatted as base64 data URL\n ...(block.metadata?.filename || block.metadata?.name\n ? 
{\n filename,\n }\n : {}),\n },\n };\n }\n\n if (block.source_type === \"base64\") {\n const filename = getRequiredFilenameFromMetadata(block);\n\n return {\n type: \"file\",\n file: {\n file_data: `data:${block.mime_type ?? \"\"};base64,${block.data}`,\n ...(block.metadata?.filename ||\n block.metadata?.name ||\n block.metadata?.title\n ? {\n filename,\n }\n : {}),\n },\n };\n }\n\n if (block.source_type === \"id\") {\n return {\n type: \"file\",\n file: {\n file_id: block.id,\n },\n };\n }\n\n throw new Error(\n `File content blocks with source_type ${block.source_type} are not supported for ChatOpenAI`\n );\n },\n};\n\n/**\n * Converts an OpenAI Chat Completions API message to a LangChain BaseMessage.\n *\n * This converter transforms messages from OpenAI's Chat Completions API format into\n * LangChain's internal message representation, handling various message types and\n * preserving metadata, tool calls, and other relevant information.\n *\n * @remarks\n * The converter handles the following message roles:\n * - `assistant`: Converted to {@link AIMessage} with support for tool calls, function calls,\n * audio content, and multi-modal outputs\n * - Other roles: Converted to generic {@link ChatMessage}\n *\n * For assistant messages, the converter:\n * - Parses and validates tool calls, separating valid and invalid calls\n * - Preserves function call information in additional_kwargs\n * - Includes usage statistics and system fingerprint in response_metadata\n * - Handles multi-modal content (text, images, audio)\n * - Optionally includes the raw API response for debugging\n *\n * @param params - Conversion parameters\n * @param params.message - The OpenAI chat completion message to convert\n * @param params.rawResponse - The complete raw response from OpenAI's API, used to extract\n * metadata like model name, usage statistics, and system fingerprint\n * @param params.includeRawResponse - If true, includes the raw OpenAI response in the\n * message's additional_kwargs under the `__raw_response` key. Useful for debugging\n * or accessing provider-specific fields. Defaults to false.\n *\n * @returns A LangChain BaseMessage instance:\n * - {@link AIMessage} for assistant messages with tool calls, metadata, and content\n * - {@link ChatMessage} for all other message types\n *\n * @example\n * ```typescript\n * const baseMessage = convertCompletionsMessageToBaseMessage({\n * message: {\n * role: \"assistant\",\n * content: \"Hello! How can I help you?\",\n * tool_calls: [\n * {\n * id: \"call_123\",\n * type: \"function\",\n * function: { name: \"get_weather\", arguments: '{\"location\":\"NYC\"}' }\n * }\n * ]\n * },\n * rawResponse: completionResponse,\n * includeRawResponse: true\n * });\n * // Returns an AIMessage with parsed tool calls and metadata\n * ```\n *\n * @throws {Error} If tool call parsing fails, the invalid tool call is captured in\n * the `invalid_tool_calls` array rather than throwing an error\n *\n */\nexport const convertCompletionsMessageToBaseMessage: Converter<\n {\n message: OpenAIClient.Chat.Completions.ChatCompletionMessage;\n rawResponse: OpenAIClient.Chat.Completions.ChatCompletion;\n includeRawResponse?: boolean;\n },\n BaseMessage\n> = ({ message, rawResponse, includeRawResponse }) => {\n const rawToolCalls: OpenAIToolCall[] | undefined = message.tool_calls as\n | OpenAIToolCall[]\n | undefined;\n switch (message.role) {\n case \"assistant\": {\n const toolCalls = [];\n const invalidToolCalls = [];\n for (const rawToolCall of rawToolCalls ?? 
[]) {\n try {\n toolCalls.push(parseToolCall(rawToolCall, { returnId: true }));\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } catch (e: any) {\n invalidToolCalls.push(makeInvalidToolCall(rawToolCall, e.message));\n }\n }\n const additional_kwargs: Record<string, unknown> = {\n function_call: message.function_call,\n tool_calls: rawToolCalls,\n };\n if (includeRawResponse !== undefined) {\n additional_kwargs.__raw_response = rawResponse;\n }\n const response_metadata: Record<string, unknown> | undefined = {\n model_provider: \"openai\",\n model_name: rawResponse.model,\n ...(rawResponse.system_fingerprint\n ? {\n usage: { ...rawResponse.usage },\n system_fingerprint: rawResponse.system_fingerprint,\n }\n : {}),\n };\n\n if (message.audio) {\n additional_kwargs.audio = message.audio;\n }\n\n const content = handleMultiModalOutput(\n message.content || \"\",\n rawResponse.choices?.[0]?.message\n );\n return new AIMessage({\n content,\n tool_calls: toolCalls,\n invalid_tool_calls: invalidToolCalls,\n additional_kwargs,\n response_metadata,\n id: rawResponse.id,\n });\n }\n default:\n return new ChatMessage(message.content || \"\", message.role ?? \"unknown\");\n }\n};\n\n/**\n * Converts an OpenAI Chat Completions API delta (streaming chunk) to a LangChain BaseMessageChunk.\n *\n * This converter is used during streaming responses to transform incremental updates from OpenAI's\n * Chat Completions API into LangChain message chunks. It handles various message types, tool calls,\n * function calls, audio content, and role-specific message chunk creation.\n *\n * @param params - Conversion parameters\n * @param params.delta - The delta object from an OpenAI streaming chunk containing incremental\n * message updates. May include content, role, tool_calls, function_call, audio, etc.\n * @param params.rawResponse - The complete raw ChatCompletionChunk response from OpenAI,\n * containing metadata like model info, usage stats, and the delta\n * @param params.includeRawResponse - Optional flag to include the raw OpenAI response in the\n * message chunk's additional_kwargs. Useful for debugging or accessing provider-specific data\n * @param params.defaultRole - Optional default role to use if the delta doesn't specify one.\n * Typically used to maintain role consistency across chunks in a streaming response\n *\n * @returns A BaseMessageChunk subclass appropriate for the message role:\n * - HumanMessageChunk for \"user\" role\n * - AIMessageChunk for \"assistant\" role (includes tool call chunks)\n * - SystemMessageChunk for \"system\" or \"developer\" roles\n * - FunctionMessageChunk for \"function\" role\n * - ToolMessageChunk for \"tool\" role\n * - ChatMessageChunk for any other role\n *\n * @example\n * Basic streaming text chunk:\n * ```typescript\n * const chunk = convertCompletionsDeltaToBaseMessageChunk({\n * delta: { role: \"assistant\", content: \"Hello\" },\n * rawResponse: { id: \"chatcmpl-123\", model: \"gpt-4\", ... }\n * });\n * // Returns: AIMessageChunk with content \"Hello\"\n * ```\n *\n * @example\n * Streaming chunk with tool call:\n * ```typescript\n * const chunk = convertCompletionsDeltaToBaseMessageChunk({\n * delta: {\n * role: \"assistant\",\n * tool_calls: [{\n * index: 0,\n * id: \"call_123\",\n * function: { name: \"get_weather\", arguments: '{\"location\":' }\n * }]\n * },\n * rawResponse: { id: \"chatcmpl-123\", ... 
}\n * });\n * // Returns: AIMessageChunk with tool_call_chunks containing partial tool call data\n * ```\n *\n * @remarks\n * - Tool calls are converted to ToolCallChunk objects with incremental data\n * - Audio content includes the chunk index from the raw response\n * - The \"developer\" role is mapped to SystemMessageChunk with a special marker\n * - Response metadata includes model provider info and usage statistics\n * - Function calls and tool calls are stored in additional_kwargs for compatibility\n */\nexport const convertCompletionsDeltaToBaseMessageChunk: Converter<\n {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n delta: Record<string, any>;\n rawResponse: OpenAIClient.Chat.Completions.ChatCompletionChunk;\n includeRawResponse?: boolean;\n defaultRole?: OpenAIClient.Chat.ChatCompletionRole;\n },\n BaseMessageChunk\n> = ({ delta, rawResponse, includeRawResponse, defaultRole }) => {\n const role = delta.role ?? defaultRole;\n const content = delta.content ?? \"\";\n let additional_kwargs: Record<string, unknown>;\n if (delta.function_call) {\n additional_kwargs = {\n function_call: delta.function_call,\n };\n } else if (delta.tool_calls) {\n additional_kwargs = {\n tool_calls: delta.tool_calls,\n };\n } else {\n additional_kwargs = {};\n }\n if (includeRawResponse) {\n additional_kwargs.__raw_response = rawResponse;\n }\n\n if (delta.audio) {\n additional_kwargs.audio = {\n ...delta.audio,\n index: rawResponse.choices[0].index,\n };\n }\n\n const response_metadata = {\n model_provider: \"openai\",\n usage: { ...rawResponse.usage },\n };\n if (role === \"user\") {\n return new HumanMessageChunk({ content, response_metadata });\n } else if (role === \"assistant\") {\n const toolCallChunks: ToolCallChunk[] = [];\n if (Array.isArray(delta.tool_calls)) {\n for (const rawToolCall of delta.tool_calls) {\n toolCallChunks.push({\n name: rawToolCall.function?.name,\n args: rawToolCall.function?.arguments,\n id: rawToolCall.id,\n index: rawToolCall.index,\n type: \"tool_call_chunk\",\n });\n }\n }\n return new AIMessageChunk({\n content,\n tool_call_chunks: toolCallChunks,\n additional_kwargs,\n id: rawResponse.id,\n response_metadata,\n });\n } else if (role === \"system\") {\n return new SystemMessageChunk({ content, response_metadata });\n } else if (role === \"developer\") {\n return new SystemMessageChunk({\n content,\n response_metadata,\n additional_kwargs: {\n __openai_role__: \"developer\",\n },\n });\n } else if (role === \"function\") {\n return new FunctionMessageChunk({\n content,\n additional_kwargs,\n name: delta.name,\n response_metadata,\n });\n } else if (role === \"tool\") {\n return new ToolMessageChunk({\n content,\n additional_kwargs,\n tool_call_id: delta.tool_call_id,\n response_metadata,\n });\n } else {\n return new ChatMessageChunk({ content, role, response_metadata });\n }\n};\n\n/**\n * Converts a standard LangChain content block to an OpenAI Completions API content part.\n *\n * This converter transforms LangChain's standardized content blocks (image, audio, file)\n * into the format expected by OpenAI's Chat Completions API. It handles various content\n * types including images (URL or base64), audio (base64), and files (data or file ID).\n *\n * @param block - The standard content block to convert. 
Can be an image, audio, or file block.\n *\n * @returns An OpenAI Chat Completions content part object, or undefined if the block\n * cannot be converted (e.g., missing required data).\n *\n * @example\n * Image with URL:\n * ```typescript\n * const block = { type: \"image\", url: \"https://example.com/image.jpg\" };\n * const part = convertStandardContentBlockToCompletionsContentPart(block);\n * // Returns: { type: \"image_url\", image_url: { url: \"https://example.com/image.jpg\" } }\n * ```\n *\n * @example\n * Image with base64 data:\n * ```typescript\n * const block = { type: \"image\", data: \"iVBORw0KGgo...\", mimeType: \"image/png\" };\n * const part = convertStandardContentBlockToCompletionsContentPart(block);\n * // Returns: { type: \"image_url\", image_url: { url: \"data:image/png;base64,iVBORw0KGgo...\" } }\n * ```\n */\nexport const convertStandardContentBlockToCompletionsContentPart: Converter<\n ContentBlock.Standard,\n | OpenAIClient.Chat.Completions.ChatCompletionContentPartImage\n | OpenAIClient.Chat.Completions.ChatCompletionContentPartInputAudio\n | OpenAIClient.Chat.Completions.ChatCompletionContentPart.File\n | undefined\n> = (block) => {\n if (block.type === \"image\") {\n if (block.url) {\n return {\n type: \"image_url\",\n image_url: {\n url: block.url,\n },\n };\n } else if (block.data) {\n return {\n type: \"image_url\",\n image_url: {\n url: `data:${block.mimeType};base64,${block.data}`,\n },\n };\n }\n }\n if (block.type === \"audio\") {\n if (block.data) {\n const format = iife(() => {\n const [, format] = block.mimeType.split(\"/\");\n if (format === \"wav\" || format === \"mp3\") {\n return format;\n }\n return \"wav\";\n });\n return {\n type: \"input_audio\",\n input_audio: {\n data: block.data.toString(),\n format,\n },\n };\n }\n }\n if (block.type === \"file\") {\n if (block.data) {\n const filename = getRequiredFilenameFromMetadata(block);\n\n return {\n type: \"file\",\n file: {\n file_data: `data:${block.mimeType};base64,${block.data}`,\n filename: filename,\n },\n };\n }\n if (block.fileId) {\n return {\n type: \"file\",\n file: {\n file_id: block.fileId,\n },\n };\n }\n }\n return undefined;\n};\n\n/**\n * Converts a LangChain BaseMessage with standard content blocks to an OpenAI Chat Completions API message parameter.\n *\n * This converter transforms LangChain's standardized message format (using contentBlocks) into the format\n * expected by OpenAI's Chat Completions API. 
It handles role mapping, content filtering, and multi-modal\n * content conversion for various message types.\n *\n * @remarks\n * The converter performs the following transformations:\n * - Maps LangChain message roles to OpenAI API roles (user, assistant, system, developer, tool, function)\n * - For reasoning models, automatically converts \"system\" role to \"developer\" role\n * - Filters content blocks based on message role (most roles only include text blocks)\n * - For user messages, converts multi-modal content blocks (images, audio, files) to OpenAI format\n * - Preserves tool call IDs for tool messages and function names for function messages\n *\n * Role-specific behavior:\n * - **developer**: Returns only text content blocks (used for reasoning models)\n * - **system**: Returns only text content blocks\n * - **assistant**: Returns only text content blocks\n * - **tool**: Returns only text content blocks with tool_call_id preserved\n * - **function**: Returns text content blocks joined as a single string with function name\n * - **user** (default): Returns multi-modal content including text, images, audio, and files\n *\n * @param params - Conversion parameters\n * @param params.message - The LangChain BaseMessage to convert. Must have contentBlocks property\n * containing an array of standard content blocks (text, image, audio, file, etc.)\n * @param params.model - Optional model name. Used to determine if special role mapping is needed\n * (e.g., \"system\" -> \"developer\" for reasoning models like o1)\n *\n * @returns An OpenAI ChatCompletionMessageParam object formatted for the Chat Completions API.\n * The structure varies by role:\n * - Developer/System/Assistant: `{ role, content: TextBlock[] }`\n * - Tool: `{ role: \"tool\", tool_call_id, content: TextBlock[] }`\n * - Function: `{ role: \"function\", name, content: string }`\n * - User: `{ role: \"user\", content: Array<TextPart | ImagePart | AudioPart | FilePart> }`\n *\n * @example\n * Simple text message:\n * ```typescript\n * const message = new HumanMessage({\n * content: [{ type: \"text\", text: \"Hello!\" }]\n * });\n * const param = convertStandardContentMessageToCompletionsMessage({ message });\n * // Returns: { role: \"user\", content: [{ type: \"text\", text: \"Hello!\" }] }\n * ```\n *\n * @example\n * Multi-modal user message with image:\n * ```typescript\n * const message = new HumanMessage({\n * content: [\n * { type: \"text\", text: \"What's in this image?\" },\n * { type: \"image\", url: \"https://example.com/image.jpg\" }\n * ]\n * });\n * const param = convertStandardContentMessageToCompletionsMessage({ message });\n * // Returns: {\n * // role: \"user\",\n * // content: [\n * // { type: \"text\", text: \"What's in this image?\" },\n * // { type: \"image_url\", image_url: { url: \"https://example.com/image.jpg\" } }\n * // ]\n * // }\n * ```\n */\nexport const convertStandardContentMessageToCompletionsMessage: Converter<\n { message: BaseMessage; model?: string },\n OpenAIClient.Chat.Completions.ChatCompletionMessageParam\n> = ({ message, model }) => {\n let role = messageToOpenAIRole(message);\n if (role === \"system\" && isReasoningModel(model)) {\n role = \"developer\";\n }\n if (role === \"developer\") {\n return {\n role: \"developer\",\n content: message.contentBlocks.filter((block) => block.type === \"text\"),\n };\n } else if (role === \"system\") {\n return {\n role: \"system\",\n content: message.contentBlocks.filter((block) => block.type === \"text\"),\n };\n } else if (role === 
\"assistant\") {\n return {\n role: \"assistant\",\n content: message.contentBlocks.filter((block) => block.type === \"text\"),\n };\n } else if (role === \"tool\" && ToolMessage.isInstance(message)) {\n return {\n role: \"tool\",\n tool_call_id: message.tool_call_id,\n content: message.contentBlocks.filter((block) => block.type === \"text\"),\n };\n } else if (role === \"function\") {\n return {\n role: \"function\",\n name: message.name ?? \"\",\n content: message.contentBlocks\n .filter((block) => block.type === \"text\")\n .join(\"\"),\n };\n }\n // Default to user message handling\n function* iterateUserContent(blocks: ContentBlock.Standard[]) {\n for (const block of blocks) {\n if (block.type === \"text\") {\n yield {\n type: \"text\" as const,\n text: block.text,\n };\n }\n const data = convertStandardContentBlockToCompletionsContentPart(block);\n if (data) {\n yield data;\n }\n }\n }\n return {\n role: \"user\",\n content: Array.from(iterateUserContent(message.contentBlocks)),\n };\n};\n\n/**\n * Converts an array of LangChain BaseMessages to OpenAI Chat Completions API message parameters.\n *\n * This converter transforms LangChain's internal message representation into the format required\n * by OpenAI's Chat Completions API. It handles various message types, roles, content formats,\n * tool calls, function calls, audio messages, and special model-specific requirements.\n *\n * @remarks\n * The converter performs several key transformations:\n * - Maps LangChain message types to OpenAI roles (user, assistant, system, tool, function, developer)\n * - Converts standard content blocks (v1 format) using a specialized converter\n * - Handles multimodal content including text, images, audio, and data blocks\n * - Preserves tool calls and function calls with proper formatting\n * - Applies model-specific role mappings (e.g., \"system\" → \"developer\" for reasoning models)\n * - Splits audio messages into separate message parameters when needed\n *\n * @param params - Conversion parameters\n * @param params.messages - Array of LangChain BaseMessages to convert. 
Can include any message\n * type: HumanMessage, AIMessage, SystemMessage, ToolMessage, FunctionMessage, etc.\n * @param params.model - Optional model name used to determine if special role mapping is needed.\n * For reasoning models (o1, o3, etc.), \"system\" role is converted to \"developer\" role.\n *\n * @returns Array of ChatCompletionMessageParam objects formatted for OpenAI's Chat Completions API.\n * Some messages may be split into multiple parameters (e.g., audio messages).\n *\n * @example\n * Basic message conversion:\n * ```typescript\n * const messages = [\n * new HumanMessage(\"What's the weather like?\"),\n * new AIMessage(\"Let me check that for you.\")\n * ];\n *\n * const params = convertMessagesToCompletionsMessageParams({\n * messages,\n * model: \"gpt-4\"\n * });\n * // Returns:\n * // [\n * // { role: \"user\", content: \"What's the weather like?\" },\n * // { role: \"assistant\", content: \"Let me check that for you.\" }\n * // ]\n * ```\n *\n * @example\n * Message with tool calls:\n * ```typescript\n * const messages = [\n * new AIMessage({\n * content: \"\",\n * tool_calls: [{\n * id: \"call_123\",\n * name: \"get_weather\",\n * args: { location: \"San Francisco\" }\n * }]\n * })\n * ];\n *\n * const params = convertMessagesToCompletionsMessageParams({ messages });\n * // Returns:\n * // [{\n * // role: \"assistant\",\n * // content: \"\",\n * // tool_calls: [{\n * // id: \"call_123\",\n * // type: \"function\",\n * // function: { name: \"get_weather\", arguments: '{\"location\":\"San Francisco\"}' }\n * // }]\n * // }]\n * ```\n */\nexport const convertMessagesToCompletionsMessageParams: Converter<\n { messages: BaseMessage[]; model?: string },\n OpenAIClient.Chat.Completions.ChatCompletionMessageParam[]\n> = ({ messages, model }) => {\n return messages.flatMap((message) => {\n if (\n \"output_version\" in message.response_metadata &&\n message.response_metadata?.output_version === \"v1\"\n ) {\n return convertStandardContentMessageToCompletionsMessage({ message });\n }\n let role = messageToOpenAIRole(message);\n if (role === \"system\" && isReasoningModel(model)) {\n role = \"developer\";\n }\n\n const content =\n typeof message.content === \"string\"\n ? 
message.content\n : message.content.map((m) => {\n if (isDataContentBlock(m)) {\n return convertToProviderContentBlock(\n m,\n completionsApiContentBlockConverter\n );\n }\n return m;\n });\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const completionParam: Record<string, any> = {\n role,\n content,\n };\n if (message.name != null) {\n completionParam.name = message.name;\n }\n if (message.additional_kwargs.function_call != null) {\n completionParam.function_call = message.additional_kwargs.function_call;\n }\n if (AIMessage.isInstance(message) && !!message.tool_calls?.length) {\n completionParam.tool_calls = message.tool_calls.map(\n convertLangChainToolCallToOpenAI\n );\n } else {\n if (message.additional_kwargs.tool_calls != null) {\n completionParam.tool_calls = message.additional_kwargs.tool_calls;\n }\n if (ToolMessage.isInstance(message) && message.tool_call_id != null) {\n completionParam.tool_call_id = message.tool_call_id;\n }\n }\n\n if (\n message.additional_kwargs.audio &&\n typeof message.additional_kwargs.audio === \"object\" &&\n \"id\" in message.additional_kwargs.audio\n ) {\n const audioMessage = {\n role: \"assistant\",\n audio: {\n id: message.additional_kwargs.audio.id,\n },\n };\n return [\n completionParam,\n audioMessage,\n ] as OpenAIClient.Chat.Completions.ChatCompletionMessageParam[];\n }\n\n return completionParam as OpenAIClient.Chat.Completions.ChatCompletionMessageParam;\n });\n};\n"],"mappings":";;;;;;;;;;AA6CA,MAAaA,sCAKR;CACH,cAAc;CAEd,sBAAsB,OAAsC;AAC1D,SAAO;GAAE,MAAM;GAAQ,MAAM,MAAM;EAAM;CAC1C;CAED,uBAAuB,OAAuC;AAC5D,MAAI,MAAM,gBAAgB,MACxB,QAAO;GACL,MAAM;GACN,WAAW;IACT,KAAK,MAAM;IACX,GAAI,MAAM,UAAU,SAChB,EAAE,QAAQ,MAAM,SAAS,OAAmC,IAC5D,CAAE;GACP;EACF;AAGH,MAAI,MAAM,gBAAgB,UAAU;GAClC,MAAM,MAAM,CAAC,KAAK,EAAE,MAAM,aAAa,GAAG,QAAQ,EAAE,MAAM,MAAM;AAChE,UAAO;IACL,MAAM;IACN,WAAW;KACT;KACA,GAAI,MAAM,UAAU,SAChB,EAAE,QAAQ,MAAM,SAAS,OAAmC,IAC5D,CAAE;IACP;GACF;EACF;AAED,QAAM,IAAI,MACR,CAAC,sCAAsC,EAAE,MAAM,YAAY,iCAAiC,CAAC;CAEhG;CAED,uBAAuB,OAA4C;AACjE,MAAI,MAAM,gBAAgB,OAAO;GAC/B,MAAM,yDAA0B,EAAE,SAAS,MAAM,IAAK,EAAC;AACvD,OAAI,CAAC,KACH,OAAM,IAAI,MACR,CAAC,kCAAkC,EAAE,MAAM,YAAY,+CAA+C,CAAC;GAI3G,MAAM,cAAc,KAAK,aAAa,MAAM,aAAa;GACzD,IAAIC;AAEJ,OAAI;IACF,wDAAyB,YAAY;GACtC,QAAO;AACN,UAAM,IAAI,MACR,CAAC,8BAA8B,EAAE,MAAM,YAAY,8CAA8C,CAAC;GAErG;AAED,OACE,SAAS,SAAS,WACjB,SAAS,YAAY,SAAS,SAAS,YAAY,MAEpD,OAAM,IAAI,MACR,CAAC,8BAA8B,EAAE,MAAM,YAAY,8CAA8C,CAAC;AAItG,UAAO;IACL,MAAM;IACN,aAAa;KACX,QAAQ,SAAS;KACjB,MAAM,KAAK;IACZ;GACF;EACF;AAED,MAAI,MAAM,gBAAgB,UAAU;GAClC,IAAIA;AAEJ,OAAI;IACF,wDAAyB,MAAM,aAAa,GAAG;GAChD,QAAO;AACN,UAAM,IAAI,MACR,CAAC,8BAA8B,EAAE,MAAM,YAAY,8CAA8C,CAAC;GAErG;AAED,OACE,SAAS,SAAS,WACjB,SAAS,YAAY,SAAS,SAAS,YAAY,MAEpD,OAAM,IAAI,MACR,CAAC,8BAA8B,EAAE,MAAM,YAAY,8CAA8C,CAAC;AAItG,UAAO;IACL,MAAM;IACN,aAAa;KACX,QAAQ,SAAS;KACjB,MAAM,MAAM;IACb;GACF;EACF;AAED,QAAM,IAAI,MACR,CAAC,sCAAsC,EAAE,MAAM,YAAY,iCAAiC,CAAC;CAEhG;CAED,sBAAsB,OAAuC;AAC3D,MAAI,MAAM,gBAAgB,OAAO;GAC/B,MAAM,yDAA0B,EAAE,SAAS,MAAM,IAAK,EAAC;GAEvD,MAAM,WAAWC,6CAAgC,MAAM;AAEvD,OAAI,CAAC,KACH,OAAM,IAAI,MACR,CAAC,iCAAiC,EAAE,MAAM,YAAY,+CAA+C,CAAC;AAI1G,UAAO;IACL,MAAM;IACN,MAAM;KACJ,WAAW,MAAM;KACjB,GAAI,MAAM,UAAU,YAAY,MAAM,UAAU,OAC5C,EACE,SACD,IACD,CAAE;IACP;GACF;EACF;AAED,MAAI,MAAM,gBAAgB,UAAU;GAClC,MAAM,WAAWA,6CAAgC,MAAM;AAEvD,UAAO;IACL,MAAM;IACN,MAAM;KACJ,WAAW,CAAC,KAAK,EAAE,MAAM,aAAa,GAAG,QAAQ,EAAE,MAAM,MAAM;KAC/D,GAAI,MAAM,UAAU,YACpB,MAAM,UAAU,QAChB,MAAM,UAAU,QACZ,EACE,SACD,IACD,CAAE;IACP;GACF;EACF;AAED,MAAI,MAAM,gBAAgB,KACxB,QAAO;GACL,MAAM;GACN,MAAM,EACJ,SAAS,MAAM,GAChB;EACF;AAG
H,QAAM,IAAI,MACR,CAAC,qCAAqC,EAAE,MAAM,YAAY,iCAAiC,CAAC;CAE/F;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0DD,MAAaC,yCAOT,CAAC,EAAE,SAAS,aAAa,oBAAoB,KAAK;CACpD,MAAMC,eAA6C,QAAQ;AAG3D,SAAQ,QAAQ,MAAhB;EACE,KAAK,aAAa;GAChB,MAAM,YAAY,CAAE;GACpB,MAAM,mBAAmB,CAAE;AAC3B,QAAK,MAAM,eAAe,gBAAgB,CAAE,EAC1C,KAAI;IACF,UAAU,qEAAmB,aAAa,EAAE,UAAU,KAAM,EAAC,CAAC;GAE/D,SAAQC,GAAQ;IACf,iBAAiB,2EAAyB,aAAa,EAAE,QAAQ,CAAC;GACnE;GAEH,MAAMC,oBAA6C;IACjD,eAAe,QAAQ;IACvB,YAAY;GACb;AACD,OAAI,uBAAuB,QACzB,kBAAkB,iBAAiB;GAErC,MAAMC,oBAAyD;IAC7D,gBAAgB;IAChB,YAAY,YAAY;IACxB,GAAI,YAAY,qBACZ;KACE,OAAO,EAAE,GAAG,YAAY,MAAO;KAC/B,oBAAoB,YAAY;IACjC,IACD,CAAE;GACP;AAED,OAAI,QAAQ,OACV,kBAAkB,QAAQ,QAAQ;GAGpC,MAAM,UAAUC,sCACd,QAAQ,WAAW,IACnB,YAAY,UAAU,IAAI,QAC3B;AACD,UAAO,IAAIC,oCAAU;IACnB;IACA,YAAY;IACZ,oBAAoB;IACpB;IACA;IACA,IAAI,YAAY;GACjB;EACF;EACD,QACE,QAAO,IAAIC,sCAAY,QAAQ,WAAW,IAAI,QAAQ,QAAQ;CACjE;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6DD,MAAaC,4CAST,CAAC,EAAE,OAAO,aAAa,oBAAoB,aAAa,KAAK;CAC/D,MAAM,OAAO,MAAM,QAAQ;CAC3B,MAAM,UAAU,MAAM,WAAW;CACjC,IAAIL;AACJ,KAAI,MAAM,eACR,oBAAoB,EAClB,eAAe,MAAM,cACtB;UACQ,MAAM,YACf,oBAAoB,EAClB,YAAY,MAAM,WACnB;MAED,oBAAoB,CAAE;AAExB,KAAI,oBACF,kBAAkB,iBAAiB;AAGrC,KAAI,MAAM,OACR,kBAAkB,QAAQ;EACxB,GAAG,MAAM;EACT,OAAO,YAAY,QAAQ,GAAG;CAC/B;CAGH,MAAM,oBAAoB;EACxB,gBAAgB;EAChB,OAAO,EAAE,GAAG,YAAY,MAAO;CAChC;AACD,KAAI,SAAS,OACX,QAAO,IAAIM,4CAAkB;EAAE;EAAS;CAAmB;UAClD,SAAS,aAAa;EAC/B,MAAMC,iBAAkC,CAAE;AAC1C,MAAI,MAAM,QAAQ,MAAM,WAAW,CACjC,MAAK,MAAM,eAAe,MAAM,YAC9B,eAAe,KAAK;GAClB,MAAM,YAAY,UAAU;GAC5B,MAAM,YAAY,UAAU;GAC5B,IAAI,YAAY;GAChB,OAAO,YAAY;GACnB,MAAM;EACP,EAAC;AAGN,SAAO,IAAIC,yCAAe;GACxB;GACA,kBAAkB;GAClB;GACA,IAAI,YAAY;GAChB;EACD;CACF,WAAU,SAAS,SAClB,QAAO,IAAIC,6CAAmB;EAAE;EAAS;CAAmB;UACnD,SAAS,YAClB,QAAO,IAAIA,6CAAmB;EAC5B;EACA;EACA,mBAAmB,EACjB,iBAAiB,YAClB;CACF;UACQ,SAAS,WAClB,QAAO,IAAIC,+CAAqB;EAC9B;EACA;EACA,MAAM,MAAM;EACZ;CACD;UACQ,SAAS,OAClB,QAAO,IAAIC,2CAAiB;EAC1B;EACA;EACA,cAAc,MAAM;EACpB;CACD;KAED,QAAO,IAAIC,2CAAiB;EAAE;EAAS;EAAM;CAAmB;AAEnE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BD,MAAaC,sDAMT,CAAC,UAAU;AACb,KAAI,MAAM,SAAS,SACjB;MAAI,MAAM,IACR,QAAO;GACL,MAAM;GACN,WAAW,EACT,KAAK,MAAM,IACZ;EACF;WACQ,MAAM,KACf,QAAO;GACL,MAAM;GACN,WAAW,EACT,KAAK,CAAC,KAAK,EAAE,MAAM,SAAS,QAAQ,EAAE,MAAM,MAAM,CACnD;EACF;CACF;AAEH,KAAI,MAAM,SAAS,SACjB;MAAI,MAAM,MAAM;GACd,MAAM,6CAAc,MAAM;IACxB,MAAM,GAAGC,SAAO,GAAG,MAAM,SAAS,MAAM,IAAI;AAC5C,QAAIA,aAAW,SAASA,aAAW,MACjC,QAAOA;AAET,WAAO;GACR,EAAC;AACF,UAAO;IACL,MAAM;IACN,aAAa;KACX,MAAM,MAAM,KAAK,UAAU;KAC3B;IACD;GACF;EACF;;AAEH,KAAI,MAAM,SAAS,QAAQ;AACzB,MAAI,MAAM,MAAM;GACd,MAAM,WAAWlB,6CAAgC,MAAM;AAEvD,UAAO;IACL,MAAM;IACN,MAAM;KACJ,WAAW,CAAC,KAAK,EAAE,MAAM,SAAS,QAAQ,EAAE,MAAM,MAAM;KAC9C;IACX;GACF;EACF;AACD,MAAI,MAAM,OACR,QAAO;GACL,MAAM;GACN,MAAM,EACJ,SAAS,MAAM,OAChB;EACF;CAEJ;AACD,QAAO;AACR;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmED,MAAamB,oDAGT,CAAC,EAAE,SAAS,OAAO,KAAK;CAC1B,IAAI,OAAOC,iCAAoB,QAAQ;AACvC,KAAI,SAAS,YAAYC,8BAAiB,MAAM,EAC9C,OAAO;AAET,KAAI,SAAS,YACX,QAAO;EACL,MAAM;EACN,SAAS,QAAQ,cAAc,OAAO,CAAC,UAAU,MAAM,SAAS,OAAO;CACxE;UACQ,SAAS,SAClB,QAAO;EACL,MAAM;EACN,SAAS,QAAQ,cAAc,OAAO,CAAC,UAAU,MAAM,SAAS,OAAO;CACxE;UACQ,SAAS,YAClB,QAAO;EACL,MAAM;EACN,SAAS,QAAQ,cAAc,OAAO,CAAC,UAAU,MAAM,SAAS,OAAO;CACxE;UACQ,SAAS,UAAUC,sCAAY,WAAW,QAAQ,CAC3D,QAAO;EACL,MAAM;EACN,cAAc,QAAQ;EACtB,SAAS,QAAQ,cAAc,OAAO,CAAC,UAAU,MAAM,SAAS,OAAO;CACxE;UACQ,SAAS,WAClB,QAAO;EACL,MAAM;EACN,MAAM,QAAQ,QAAQ;EACtB,SAAS,QAAQ,cACd,OAAO,CAAC,UAAU,MAAM,SAAS,OAAO,CACxC,KAAK,GAAG;CACZ;CAGH,UAAU,mBAAmBC,QAAiC;AAC5D,OAAK,MAAM,SAAS,QAAQ;A
AC1B,OAAI,MAAM,SAAS,QACjB,MAAM;IACJ,MAAM;IACN,MAAM,MAAM;GACb;GAEH,MAAM,OAAO,oDAAoD,MAAM;AACvE,OAAI,MACF,MAAM;EAET;CACF;AACD,QAAO;EACL,MAAM;EACN,SAAS,MAAM,KAAK,mBAAmB,QAAQ,cAAc,CAAC;CAC/D;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyED,MAAaC,4CAGT,CAAC,EAAE,UAAU,OAAO,KAAK;AAC3B,QAAO,SAAS,QAAQ,CAAC,YAAY;AACnC,MACE,oBAAoB,QAAQ,qBAC5B,QAAQ,mBAAmB,mBAAmB,KAE9C,QAAO,kDAAkD,EAAE,QAAS,EAAC;EAEvE,IAAI,OAAOJ,iCAAoB,QAAQ;AACvC,MAAI,SAAS,YAAYC,8BAAiB,MAAM,EAC9C,OAAO;EAGT,MAAM,UACJ,OAAO,QAAQ,YAAY,WACvB,QAAQ,UACR,QAAQ,QAAQ,IAAI,CAAC,MAAM;AACzB,yDAAuB,EAAE,CACvB,qEACE,GACA,oCACD;AAEH,UAAO;EACR,EAAC;EAER,MAAMI,kBAAuC;GAC3C;GACA;EACD;AACD,MAAI,QAAQ,QAAQ,MAClB,gBAAgB,OAAO,QAAQ;AAEjC,MAAI,QAAQ,kBAAkB,iBAAiB,MAC7C,gBAAgB,gBAAgB,QAAQ,kBAAkB;AAE5D,MAAIlB,oCAAU,WAAW,QAAQ,IAAI,CAAC,CAAC,QAAQ,YAAY,QACzD,gBAAgB,aAAa,QAAQ,WAAW,IAC9CmB,8EACD;OACI;AACL,OAAI,QAAQ,kBAAkB,cAAc,MAC1C,gBAAgB,aAAa,QAAQ,kBAAkB;AAEzD,OAAIJ,sCAAY,WAAW,QAAQ,IAAI,QAAQ,gBAAgB,MAC7D,gBAAgB,eAAe,QAAQ;EAE1C;AAED,MACE,QAAQ,kBAAkB,SAC1B,OAAO,QAAQ,kBAAkB,UAAU,YAC3C,QAAQ,QAAQ,kBAAkB,OAClC;GACA,MAAM,eAAe;IACnB,MAAM;IACN,OAAO,EACL,IAAI,QAAQ,kBAAkB,MAAM,GACrC;GACF;AACD,UAAO,CACL,iBACA,YACD;EACF;AAED,SAAO;CACR,EAAC;AACH"}