
ai-platform-converter

Lossless API parameter conversion between multiple AI platforms (OpenAI, Anthropic, Gemini, DeepSeek, Wenwen, Vertex AI, Huawei, BigModel)

"use strict"; /** * Convert OpenAI response to Gemini format */ Object.defineProperty(exports, "__esModule", { value: true }); exports.convertOpenAIResponseToGemini = convertOpenAIResponseToGemini; const common_1 = require("../../types/common"); const helpers_1 = require("../../utils/helpers"); function convertOpenAIResponseToGemini(response, options) { const choice = response.choices[0]; const message = choice.message; // Convert message to Gemini parts const parts = []; // Add text content if (message.content) { const textContent = typeof message.content === 'string' ? message.content : message.content.map(c => c.text || '').join('\n'); if (textContent) { parts.push({ text: textContent }); } } // Add tool calls if (message.tool_calls && message.tool_calls.length > 0) { for (const tc of message.tool_calls) { parts.push({ functionCall: { name: tc.function.name, args: (0, helpers_1.safeJsonParse)(tc.function.arguments) } }); } } // Map finish reason const finishReason = (0, helpers_1.mapFinishReason)(choice.finish_reason, common_1.Platform.OpenAI, common_1.Platform.Gemini); // Build candidate const candidate = { content: { role: 'model', parts }, finishReason, index: choice.index }; // Build Gemini response const geminiResponse = { candidates: [candidate], usageMetadata: response.usage ? { promptTokenCount: response.usage.prompt_tokens, candidatesTokenCount: response.usage.completion_tokens, totalTokenCount: response.usage.total_tokens } : undefined, // Preserve extensions _extensions: { platform: common_1.Platform.OpenAI, originalParams: options?.preserveExtensions ? (0, helpers_1.deepClone)(response) : undefined, openai: { id: response.id, system_fingerprint: response.system_fingerprint, logprobs: choice.logprobs } } }; return (0, helpers_1.removeUndefined)(geminiResponse); }