@unified-llm/core
Version:
Unified LLM interface (in-memory).
350 lines • 15.5 kB
JavaScript
// src/providers/openai/responses-api-client.ts
import { accumulateUsage, } from "../../utils/token-utils.js";
import { logTimed, NOOP_LOGGER } from "../../utils/logging.js";
import { createDefaultClock } from "../../utils/timing.js";
import { setupMcpClientsAndTools } from "../../utils/mcp/setup-mcp-tools.js";
import { McpToolCatalog, } from "../../utils/mcp/mcp-tool-catalog.js";
import { executeToolCalls, } from "../../utils/tools/execute-tool-calls.js";
// ---------------------------------------------------------
// Helper that calls the OpenAI Responses API (OpenAI-specific)
// ---------------------------------------------------------
/**
 * Calls the OpenAI Responses API and returns the final response object.
 *
 * Non-streaming: POSTs the body and returns the parsed JSON response.
 * Streaming (opts.isStream === true, or the server replies with
 * `text/event-stream`): parses the SSE stream, forwards every parsed event
 * to `opts.sseCallback` (when provided), and resolves with the `response`
 * payload of the `response.completed` event.
 *
 * @param body  Request body for the Responses API; must be a non-array object
 *              when streaming so `stream: true` can be merged in.
 * @param opts  { apiKey, endpoint?, isStream?, sseCallback?, signal? }
 * @returns     The completed response object from the API.
 * @throws      On non-2xx HTTP status, on `response.failed` / `error` /
 *              `response.incomplete` stream events, on a missing body for a
 *              streamed response, or when the stream ends without a
 *              `response.completed` event.
 */
async function callResponsesAPI(body, opts) {
    const DEFAULT_RESPONSES_API_ENDPOINT = "https://api.openai.com/v1/responses";
    const { apiKey, isStream, sseCallback, signal } = opts;
    // Use the caller-supplied endpoint only when it is a non-blank string.
    const endpoint = typeof opts.endpoint === "string" && opts.endpoint.trim().length > 0
        ? opts.endpoint.trim()
        : DEFAULT_RESPONSES_API_ENDPOINT;
    if (isStream && (!body || typeof body !== "object" || Array.isArray(body))) {
        throw new Error("callResponsesAPI streaming requires body to be a non-array object.");
    }
    // Merge `stream: true` into the payload only when streaming was requested.
    const requestBody = isStream === true ? { ...body, stream: true } : body;
    const res = await fetch(endpoint, {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${apiKey}`,
        },
        body: JSON.stringify(requestBody),
        signal,
    });
    if (!res.ok) {
        // Best-effort read of the error body; ignore secondary read failures.
        const text = await res.text().catch(() => "");
        throw new Error(`OpenAI Responses API error: ${res.status} ${res.statusText}\n${text}`);
    }
    const contentType = (res.headers.get("content-type") ?? "").toLowerCase();
    const isEventStream = contentType.includes("text/event-stream");
    // Stream-parse when explicitly requested OR when the server streams anyway.
    const shouldStream = isStream === true || isEventStream;
    if (!shouldStream) {
        return res.json();
    }
    const canProgress = typeof sseCallback === "function";
    if (!res.body) {
        throw new Error("OpenAI Responses API error: missing response body.");
    }
    const reader = res.body.getReader();
    const decoder = new TextDecoder();
    let buffer = "";
    let completedResponse = null;
    const emitProgress = (event) => {
        if (!canProgress)
            return;
        try {
            sseCallback(event);
        }
        catch {
            // ignore progress sink errors — progress delivery is best-effort
        }
    };
    // SSE events are separated by a blank line; accept both LF and CRLF.
    const findBoundaryIndex = (value) => {
        const lfIndex = value.indexOf("\n\n");
        const crlfIndex = value.indexOf("\r\n\r\n");
        if (lfIndex === -1)
            return crlfIndex;
        if (crlfIndex === -1)
            return lfIndex;
        return Math.min(lfIndex, crlfIndex);
    };
    // Parse one raw SSE event: collect its `data:` lines, JSON-decode them,
    // forward to the progress sink, and react to terminal event types.
    const handleRawEvent = (rawEvent) => {
        if (!rawEvent.trim())
            return;
        const data = rawEvent
            .split(/\r\n|\n/)
            .filter((line) => line.startsWith("data:"))
            .map((line) => line.replace(/^data:\s?/, ""))
            .join("\n")
            .trim();
        if (!data || data === "[DONE]")
            return;
        let event;
        try {
            event = JSON.parse(data);
        }
        catch {
            return; // skip malformed JSON payloads
        }
        emitProgress(event);
        if (event?.type === "response.completed") {
            completedResponse = event.response ?? null;
            return;
        }
        if (event?.type === "response.incomplete") {
            const reason = event?.response?.incomplete_details?.reason;
            throw new Error(`OpenAI Responses API incomplete: ${reason ?? "unknown reason"}`);
        }
        if (event?.type === "response.failed" || event?.type === "error") {
            throw new Error(`OpenAI Responses API error event: ${JSON.stringify(event)}`);
        }
    };
    try {
        let streamEnded = false;
        while (!streamEnded && !completedResponse) {
            const { value, done } = await reader.read();
            if (done) {
                streamEnded = true;
                buffer += decoder.decode(); // flush any buffered partial code point
            }
            else {
                buffer += decoder.decode(value, { stream: true });
            }
            let boundaryIndex = findBoundaryIndex(buffer);
            while (boundaryIndex !== -1 && !completedResponse) {
                // A CRLF blank line consumes 4 chars, an LF blank line 2.
                const boundaryLength = buffer.startsWith("\r\n\r\n", boundaryIndex) ? 4 : 2;
                const rawEvent = buffer.slice(0, boundaryIndex);
                buffer = buffer.slice(boundaryIndex + boundaryLength);
                handleRawEvent(rawEvent);
                boundaryIndex = findBoundaryIndex(buffer);
            }
            // Bug fix: a final event terminated by end-of-stream rather than a
            // blank line was previously dropped; process it as well.
            if (streamEnded && !completedResponse && buffer.trim()) {
                handleRawEvent(buffer);
                buffer = "";
            }
        }
    }
    finally {
        // Release the stream whether we finished early or errored out.
        await reader.cancel().catch(() => { });
    }
    if (!completedResponse) {
        throw new Error("OpenAI Responses API error: response.completed not received.");
    }
    return completedResponse;
}
// ---------------------------------------------------------
// Helper that extracts the final text from a Responses API result (OpenAI-specific)
// ---------------------------------------------------------
/**
 * Extracts the final text from a Responses API result.
 *
 * Prefers the convenience `output_text` field; otherwise concatenates every
 * `output_text` part within each `message` output item and joins messages
 * with newlines.
 *
 * @throws When no text output can be found.
 */
function getOutputText(response) {
    const directText = response?.output_text;
    if (typeof directText === "string") {
        return directText;
    }
    const outputItems = response?.output;
    if (Array.isArray(outputItems)) {
        const messageTexts = outputItems
            .filter((item) => item.type === "message" && Array.isArray(item.content))
            .map((item) => item.content
                .filter((part) => part?.type === "output_text" && typeof part.text === "string")
                .map((part) => part.text))
            .filter((parts) => parts.length > 0)
            .map((parts) => parts.join(""));
        if (messageTexts.length > 0) {
            return messageTexts.join("\n");
        }
    }
    throw new Error("No text output found in Responses API result.");
}
/**
 * Extracts the function_call items from an OpenAI Responses API `output` array.
 */
/**
 * Returns the `function_call` items from a Responses API result, or an empty
 * array when the response has no `output` array.
 */
function getFunctionCallsFromResponse(response) {
    const outputItems = response?.output;
    if (!Array.isArray(outputItems)) {
        return [];
    }
    return outputItems.filter((item) => item?.type === "function_call");
}
/**
 * Converts OpenAI `function_call` items into the neutral NormalizedToolCall form.
 */
/**
 * Maps OpenAI function_call items to the neutral shape
 * `{ name, callId, arguments }`, preferring `call_id` over `id`.
 *
 * @throws When a function call has neither `call_id` nor `id`.
 */
function toNormalizedToolCalls(functionCalls) {
    const normalized = [];
    for (const functionCall of functionCalls) {
        const resolvedCallId = functionCall.call_id ?? functionCall.id;
        if (!resolvedCallId) {
            throw new Error(`Missing call_id for function call: ${functionCall.name}`);
        }
        normalized.push({
            name: functionCall.name,
            callId: resolvedCallId,
            arguments: functionCall.arguments,
        });
    }
    return normalized;
}
/**
 * Runs the tool-calling loop against the OpenAI Responses API: sends the
 * initial request, then repeatedly executes any returned function calls and
 * feeds their outputs back until no calls remain or the loop cap is hit.
 */
async function runToolCallingLoop(options) {
    const { baseInput, openAiTools, toolNameToClient, localToolHandlers, usage, model, apiKey, endpoint, isStream, thread, structuredOutput, temperature, truncation, sseCallback, signal, logger, clock, } = options;
    // Safety cap on tool-call round trips, overridable via env.
    // NOTE(review): Number("") is 0 and Number("abc") is NaN, so an unset or
    // invalid RESPONSES_MAX_LOOPS falls back to 10 via `|| 10`.
    const maxLoops = Number(process.env.RESPONSES_MAX_LOOPS) || 10;
    // Builds the request payload; optional fields are spread in only when set.
    const buildRequestBody = (input, previousResponseId) => ({
        model,
        input,
        previous_response_id: previousResponseId,
        tools: openAiTools,
        tool_choice: "auto",
        parallel_tool_calls: true,
        ...(structuredOutput ? { text: structuredOutput } : {}),
        ...(truncation ? { truncation } : {}),
        ...(temperature !== undefined ? { temperature } : {}),
    });
    // When a thread is provided, it decides the input / previous_response_id
    // pair; otherwise baseInput is sent as-is with no prior response id.
    const requestContext = thread
        ? thread.buildRequestContextForResponsesAPI(baseInput)
        : { input: baseInput };
    // Initial model call (timed and logged via logTimed).
    let response = await logTimed(logger, clock, "llm.step.completed", {
        model,
        previous_response_id: requestContext.previous_response_id,
    }, async () => callResponsesAPI(buildRequestBody(requestContext.input, requestContext.previous_response_id), {
        apiKey,
        endpoint: endpoint,
        isStream,
        sseCallback,
        signal,
    }), "info");
    logger.debug("responses.api.result", {
        responseJson: JSON.stringify(response, null, 2),
    });
    // Accumulate token usage and record the conversation state on the thread.
    accumulateUsage(usage, response === null || response === void 0 ? void 0 : response.usage);
    thread === null || thread === void 0 ? void 0 : thread.updatePreviousResponseId(response === null || response === void 0 ? void 0 : response.id);
    if (thread && Array.isArray(response === null || response === void 0 ? void 0 : response.output)) {
        thread.appendToHistory(response.output);
    }
    let lastResponse = response;
    // Tool-calling loop: keep executing returned function calls and feeding
    // their outputs back until the model stops requesting tools.
    for (let loop = 0; loop < maxLoops; loop++) {
        const functionCalls = getFunctionCallsFromResponse(response);
        if (functionCalls.length === 0) {
            break;
        }
        // OpenAI-specific function_call → neutral form
        const normalizedCalls = toNormalizedToolCalls(functionCalls);
        // Execute tools via the neutral shared engine (local handlers take
        // precedence over MCP clients)
        const normalizedResults = await executeToolCalls(normalizedCalls, toolNameToClient, localToolHandlers, { logger, clock });
        // Neutral results → OpenAI function_call_output format
        const apiFunctionOutputs = normalizedResults.map((r) => ({
            type: "function_call_output",
            call_id: r.callId,
            output: r.output,
        }));
        thread === null || thread === void 0 ? void 0 : thread.appendToHistory(apiFunctionOutputs);
        // Follow-up call: tool outputs become the new input, chained to the
        // previous response via previous_response_id.
        response = await logTimed(logger, clock, "llm.step.completed", {
            model,
            previous_response_id: response.id,
        }, async () => callResponsesAPI(buildRequestBody(apiFunctionOutputs, response.id), {
            apiKey,
            endpoint: endpoint,
            isStream,
            sseCallback,
            signal,
        }), "info");
        logger.debug("responses.api.result", {
            responseJson: JSON.stringify(response, null, 2),
        });
        accumulateUsage(usage, response === null || response === void 0 ? void 0 : response.usage);
        thread === null || thread === void 0 ? void 0 : thread.updatePreviousResponseId(response === null || response === void 0 ? void 0 : response.id);
        if (thread && Array.isArray(response === null || response === void 0 ? void 0 : response.output)) {
            thread.appendToHistory(response.output);
        }
        lastResponse = response;
    }
    // NOTE(review): if maxLoops is exhausted while calls remain pending, the
    // last response is returned without executing them — confirm intended.
    return lastResponse;
}
/**
 * Agent execution using the OpenAI Responses API, with MCP and local tool
 * support. Connects MCP servers, validates the local tool set, runs the
 * tool-calling loop, and returns `{ output, usage, rawResponse }`. MCP
 * clients are always closed in the finally block.
 */
export async function callResponsesApiAgent(options) {
    var _a, _b;
    const { mcpServers, model, apiKey, endpoint, isStream, baseInput, thread, structuredOutput, config, localTools, sseCallback, signal, logger: loggerOption, clock: clockOption, } = options;
    // Fall back to a no-op logger / default clock when none are injected.
    const logger = loggerOption !== null && loggerOption !== void 0 ? loggerOption : NOOP_LOGGER;
    const clock = clockOption !== null && clockOption !== void 0 ? clockOption : createDefaultClock();
    // An explicit apiKey option wins; otherwise read the environment variable.
    const resolvedApiKey = apiKey !== null && apiKey !== void 0 ? apiKey : process.env.OPENAI_API_KEY;
    if (!resolvedApiKey) {
        throw new Error("OPENAI_API_KEY is missing.");
    }
    const temperature = (_a = config === null || config === void 0 ? void 0 : config.temperature) !== null && _a !== void 0 ? _a : undefined;
    const truncation = (_b = config === null || config === void 0 ? void 0 : config.truncation) !== null && _b !== void 0 ? _b : undefined;
    let mcpClients = [];
    let toolNameToClient = new Map();
    let openAiTools = [];
    // Token usage accumulated across every API call in the loop.
    const usage = {
        inputTokens: 0,
        outputTokens: 0,
        totalTokens: 0,
        cachedInputTokens: 0,
    };
    let lastResponse;
    try {
        // ---------------------------------------------------------
        // 1) Connect MCP clients & collect their tools
        // ---------------------------------------------------------
        const setup = await setupMcpClientsAndTools({
            mcpServers,
            clientName: "local-mcp-responses-client",
            clientVersion: "1.0.0",
        });
        mcpClients = setup.mcpClients;
        toolNameToClient = setup.toolNameToClient;
        openAiTools = new McpToolCatalog(setup.mcpTools).toOpenAiTools();
        if (localTools) {
            // Validate local tools: no name collision with MCP tools, no
            // duplicate local names, and every declared tool has a handler.
            const seenLocalToolNames = new Set();
            for (const tool of localTools.tools) {
                if (toolNameToClient.has(tool.name)) {
                    throw new Error(`Tool name collision between MCP and local tools: ${tool.name}`);
                }
                if (seenLocalToolNames.has(tool.name)) {
                    throw new Error(`Duplicate local tool name: ${tool.name}`);
                }
                if (!localTools.handlers.has(tool.name)) {
                    throw new Error(`Missing local tool handler: ${tool.name}`);
                }
                seenLocalToolNames.add(tool.name);
            }
            // NOTE(review): localTools.tools are assumed to already be in the
            // OpenAI tool schema (same shape McpToolCatalog emits) — confirm.
            openAiTools.push(...localTools.tools);
        }
        // ---------------------------------------------------------
        // 2) Invoke the LLM tool-calling loop
        // ---------------------------------------------------------
        lastResponse = await runToolCallingLoop({
            baseInput,
            openAiTools,
            toolNameToClient,
            localToolHandlers: localTools === null || localTools === void 0 ? void 0 : localTools.handlers,
            usage,
            model,
            apiKey: resolvedApiKey,
            endpoint,
            isStream,
            thread,
            structuredOutput,
            truncation,
            temperature,
            sseCallback,
            signal,
            logger,
            clock,
        });
        if (!lastResponse) {
            throw new Error("No response from OpenAI Responses API.");
        }
        // ---------------------------------------------------------
        // 3) Final output
        // ---------------------------------------------------------
        const outputText = getOutputText(lastResponse);
        logger.info("responses.output_text", { outputText });
        return { output: outputText, usage, rawResponse: lastResponse };
    }
    finally {
        // Always close every MCP client; allSettled + inner try/catch make the
        // cleanup best-effort so a failing close never masks the real result.
        await Promise.allSettled(mcpClients.map(async (client) => {
            try {
                if (typeof (client === null || client === void 0 ? void 0 : client.close) === "function") {
                    await client.close();
                }
            }
            catch (_a) {
                // ignore
            }
        }));
    }
}
//# sourceMappingURL=responses-api-agent.js.map