route-claudecode
Advanced routing and transformation system for Claude Code outputs to multiple AI providers
"use strict";
/**
 * OpenAI Input Format Processor
 * Handles incoming requests in OpenAI API format
 * Project owner: Jason Zhang
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAIInputProcessor = void 0;
const types_1 = require("@/types");
const logger_1 = require("@/utils/logger");
const optimized_tool_call_detector_1 = require("@/utils/optimized-tool-call-detector");
/**
 * Architecture Note: Preprocessing has been moved to the routing layer.
 * The input layer now only handles basic format validation and parsing.
 * All transformations and patches are handled by the Enhanced Routing Engine.
 */
class OpenAIInputProcessor {
    name = 'openai';
    /**
     * Check if this processor can handle the request
     */
    canProcess(request) {
        try {
            // Check for OpenAI-specific fields
            return (typeof request === 'object' &&
                request !== null &&
                Array.isArray(request.messages) &&
                // OpenAI doesn't use 'system' as an array (it lives in messages)
                (request.system === undefined || typeof request.system === 'string') &&
                // Check for OpenAI-style tools format
                (!request.tools || this.isOpenAIToolsFormat(request.tools)));
        }
        catch (error) {
            logger_1.logger.debug('Error checking if request can be processed by OpenAI processor:', error);
            return false;
        }
    }
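    /*
     * Example (illustrative, assumed request shapes): canProcess() accepts an
     * OpenAI-style body and rejects an Anthropic-style one, e.g.
     *
     *   processor.canProcess({
     *       model: 'gpt-4o',
     *       messages: [{ role: 'user', content: 'hi' }],
     *       tools: [{ type: 'function', function: { name: 'get_weather', description: 'Weather lookup', parameters: { type: 'object' } } }]
     *   }); // => true
     *
     *   processor.canProcess({
     *       model: 'claude-3-5-sonnet',
     *       system: [{ type: 'text', text: 'You are helpful' }], // 'system' as an array is not OpenAI-style
     *       messages: [{ role: 'user', content: 'hi' }]
     *   }); // => false
     *
     * The model and tool names are placeholders, not values this router requires.
     */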
    /**
     * Process the incoming request
     */
    async process(request) {
        try {
            // Validate the request
            if (!this.validate(request)) {
                throw new types_1.ValidationError('Invalid OpenAI request format');
            }
            const openaiRequest = request;
            const requestId = openaiRequest.metadata?.requestId || 'temp-id';
            // Convert OpenAI format to Anthropic-like format for internal processing
            const anthropicLikeRequest = this.convertToAnthropicFormat(openaiRequest);
            // 🎯 Keep the optimized tool-call detection, but only as supplementary validation
            const toolDetectionResult = optimized_tool_call_detector_1.optimizedToolCallDetector.detectInRequest(anthropicLikeRequest, requestId);
            logger_1.logger.debug('OpenAI input processed through unified preprocessing and tool detection', {
                requestId,
                hasToolCalls: toolDetectionResult.hasToolCalls,
                detectedPatterns: toolDetectionResult.detectedPatterns,
                confidence: toolDetectionResult.confidence,
                needsBuffering: toolDetectionResult.needsBuffering,
                extractedCount: toolDetectionResult.extractedToolCalls?.length || 0,
                detectionMethod: toolDetectionResult.detectionMethod,
                preprocessingApplied: false, // Preprocessing moved to routing layer
                originalFormat: 'openai'
            }, requestId, 'openai-input-processor');
            // Normalize to our internal format
            const baseRequest = {
                model: anthropicLikeRequest.model,
                messages: this.normalizeMessages(anthropicLikeRequest.messages),
                stream: anthropicLikeRequest.stream || false,
                max_tokens: anthropicLikeRequest.max_tokens || 131072,
                temperature: anthropicLikeRequest.temperature,
                // 🔧 Store tools at top level for Provider access
                tools: anthropicLikeRequest.tools,
                metadata: {
                    requestId: '', // Will be set by server
                    ...anthropicLikeRequest.metadata,
                    originalFormat: 'openai',
                    system: anthropicLikeRequest.system,
                    tools: anthropicLikeRequest.tools, // Keep copy in metadata for session management
                    thinking: anthropicLikeRequest.thinking || false,
                    // 🎯 Add the tool-call detection result to metadata
                    toolDetection: toolDetectionResult,
                    // 🆕 Add preprocessing information
                    preprocessing: {
                        applied: false, // Preprocessing moved to routing layer
                        timestamp: Date.now()
                    },
                    // Preserve the original OpenAI format information
                    originalOpenAIRequest: {
                        tool_choice: openaiRequest.tool_choice,
                        hasToolCalls: !!openaiRequest.messages.some(m => m.tool_calls),
                        toolCallCount: openaiRequest.messages.reduce((count, m) => count + (m.tool_calls?.length || 0), 0)
                    }
                }
            };
            logger_1.logger.debug('Processed OpenAI request:', {
                requestId: baseRequest.metadata?.requestId,
                model: baseRequest.model,
                messageCount: baseRequest.messages.length,
                hasTools: !!anthropicLikeRequest.tools?.length,
                hasSystem: !!anthropicLikeRequest.system?.length,
                isThinking: !!anthropicLikeRequest.thinking,
                toolDetectionConfidence: toolDetectionResult.confidence,
                needsBuffering: toolDetectionResult.needsBuffering,
                detectionMethod: toolDetectionResult.detectionMethod,
                preprocessingApplied: baseRequest.metadata?.preprocessing.applied,
                originalFormat: 'openai'
            });
            return baseRequest;
        }
        catch (error) {
            logger_1.logger.error('Error processing OpenAI request:', error);
            throw error;
        }
    }
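    /*
     * Usage sketch (assumed input, not part of the compiled module): process()
     * resolves to the internal base request, with OpenAI-specific pieces moved
     * into metadata, e.g.
     *
     *   const baseRequest = await processor.process({
     *       model: 'gpt-4o',
     *       messages: [
     *           { role: 'system', content: 'Be brief.' },
     *           { role: 'user', content: 'What is the weather in Paris?' }
     *       ],
     *       stream: true
     *   });
     *   // baseRequest.model                    => 'gpt-4o'
     *   // baseRequest.messages.length          => 1 (the system message is lifted into metadata.system)
     *   // baseRequest.stream                   => true
     *   // baseRequest.metadata.originalFormat  => 'openai'
     *   // baseRequest.metadata.toolDetection   => result of optimizedToolCallDetector.detectInRequest()
     *
     * The field values in the comments follow from the code above; the input
     * itself is an invented example.
     */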
    /**
     * Convert OpenAI format to Anthropic-like format for internal processing
     */
    convertToAnthropicFormat(openaiRequest) {
        const anthropicRequest = {
            model: openaiRequest.model,
            max_tokens: openaiRequest.max_tokens || 131072,
            temperature: openaiRequest.temperature,
            stream: openaiRequest.stream || false,
            metadata: openaiRequest.metadata
        };
        // Convert messages
        const messages = [];
        let systemMessage;
        for (const msg of openaiRequest.messages) {
            if (msg.role === 'system') {
                // Extract system message
                systemMessage = typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content);
            }
            else {
                // Convert regular messages
                const convertedMsg = {
                    role: msg.role === 'tool' ? 'user' : msg.role, // Convert tool messages to user
                    content: this.convertMessageContent(msg)
                };
                messages.push(convertedMsg);
            }
        }
        anthropicRequest.messages = messages;
        // Add system message if exists
        if (systemMessage) {
            anthropicRequest.system = [{ type: 'text', text: systemMessage }];
        }
        // Convert tools from OpenAI to Anthropic format
        if (openaiRequest.tools) {
            anthropicRequest.tools = openaiRequest.tools.map(tool => ({
                name: tool.function.name,
                description: tool.function.description,
                input_schema: tool.function.parameters
            }));
        }
        // Convert tool_choice
        if (openaiRequest.tool_choice) {
            if (openaiRequest.tool_choice === 'auto') {
                anthropicRequest.tool_choice = { type: 'auto' };
            }
            else if (openaiRequest.tool_choice === 'none') {
                // Don't set tool_choice for 'none'
            }
            else if (typeof openaiRequest.tool_choice === 'string') {
                anthropicRequest.tool_choice = {
                    type: 'tool',
                    name: openaiRequest.tool_choice
                };
            }
            else if (typeof openaiRequest.tool_choice === 'object' && openaiRequest.tool_choice.function) {
                anthropicRequest.tool_choice = {
                    type: 'tool',
                    name: openaiRequest.tool_choice.function.name
                };
            }
        }
        return anthropicRequest;
    }
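    /*
     * Example (illustrative): an OpenAI tool definition and tool_choice map to
     * the Anthropic-like shapes produced above, roughly
     *
     *   tools: [{ type: 'function', function: { name: 'get_weather', description: 'Weather lookup', parameters: { type: 'object', properties: { city: { type: 'string' } } } } }]
     *     -> tools: [{ name: 'get_weather', description: 'Weather lookup', input_schema: { type: 'object', properties: { city: { type: 'string' } } } }]
     *
     *   tool_choice: { type: 'function', function: { name: 'get_weather' } }
     *     -> tool_choice: { type: 'tool', name: 'get_weather' }
     *
     * 'get_weather' is a made-up tool name used only for illustration.
     */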
    /**
     * Convert OpenAI message content to Anthropic format
     */
    convertMessageContent(msg) {
        const content = [];
        // Add text content
        if (msg.content) {
            if (typeof msg.content === 'string') {
                content.push({ type: 'text', text: msg.content });
            }
            else if (Array.isArray(msg.content)) {
                // Handle complex content (images, etc.)
                msg.content.forEach(block => {
                    if (block.type === 'text') {
                        content.push({ type: 'text', text: block.text });
                    }
                    else {
                        content.push(block); // Pass through other types
                    }
                });
            }
            else if (typeof msg.content === 'object' && msg.content !== null) {
                // 🔧 Handle object content format (e.g., { type: "text", text: "..." })
                const contentObj = msg.content;
                if (contentObj.type === 'text' && contentObj.text) {
                    content.push({ type: 'text', text: contentObj.text });
                }
                else {
                    // Convert object to text content
                    content.push({ type: 'text', text: JSON.stringify(msg.content) });
                }
            }
        }
        // Add tool calls as tool_use blocks
        if (msg.tool_calls) {
            msg.tool_calls.forEach(toolCall => {
                content.push({
                    type: 'tool_use',
                    id: toolCall.id,
                    name: toolCall.function.name,
                    input: JSON.parse(toolCall.function.arguments || '{}')
                });
            });
        }
        // Handle tool results
        if (msg.role === 'tool' && msg.tool_call_id) {
            content.push({
                type: 'tool_result',
                tool_use_id: msg.tool_call_id,
                content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content)
            });
        }
        // Return single text if only one text block
        if (content.length === 1 && content[0].type === 'text') {
            return content[0].text;
        }
        return content.length > 0 ? content : '';
    }
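    /*
     * Example (illustrative): an assistant message carrying OpenAI tool_calls is
     * converted into Anthropic-style tool_use blocks, e.g.
     *
     *   convertMessageContent({
     *       role: 'assistant',
     *       content: null,
     *       tool_calls: [{ id: 'call_1', function: { name: 'get_weather', arguments: '{"city":"Paris"}' } }]
     *   })
     *   // => [{ type: 'tool_use', id: 'call_1', name: 'get_weather', input: { city: 'Paris' } }]
     *
     *   convertMessageContent({ role: 'tool', tool_call_id: 'call_1', content: '22°C' })
     *   // => [{ type: 'text', text: '22°C' },
     *   //     { type: 'tool_result', tool_use_id: 'call_1', content: '22°C' }]
     *
     * The tool name and values are invented. Note that a malformed `arguments`
     * string makes the JSON.parse() above throw; the surrounding process() call
     * logs and rethrows such errors.
     */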
    /**
     * Validate the request format
     */
    validate(request) {
        try {
            return (request &&
                typeof request === 'object' &&
                typeof request.model === 'string' &&
                Array.isArray(request.messages) &&
                request.messages.length > 0 &&
                request.messages.every((msg) => msg &&
                    typeof msg.role === 'string' &&
                    ['user', 'assistant', 'system', 'tool'].includes(msg.role) &&
                    (msg.content !== undefined || msg.tool_calls)));
        }
        catch (error) {
            logger_1.logger.error('OpenAI validation error:', error);
            return false;
        }
    }
    /**
     * Check if tools are in OpenAI format
     */
    isOpenAIToolsFormat(tools) {
        if (!Array.isArray(tools) || tools.length === 0) {
            return true;
        }
        // OpenAI tools have 'function.parameters', Anthropic has 'input_schema'
        return tools.every(tool => tool &&
            tool.type === 'function' &&
            tool.function &&
            typeof tool.function.name === 'string' &&
            typeof tool.function.description === 'string' &&
            tool.function.parameters &&
            typeof tool.function.parameters === 'object');
    }
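    /*
     * Example (illustrative) of the two tool formats this check distinguishes:
     *
     *   OpenAI style (accepted):
     *     { type: 'function', function: { name: 'get_weather', description: 'Weather lookup', parameters: { type: 'object' } } }
     *
     *   Anthropic style (rejected, so canProcess() returns false for a request carrying it):
     *     { name: 'get_weather', description: 'Weather lookup', input_schema: { type: 'object' } }
     *
     * The tool name is a placeholder. Note that the check also requires a string
     * `description` and an object `parameters`, so definitions that omit either
     * field are treated as non-OpenAI.
     */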
    /**
     * Normalize messages to internal format
     */
    normalizeMessages(messages) {
        return messages.map(message => ({
            role: message.role,
            content: this.normalizeContent(message.content)
        }));
    }
    /**
     * Normalize content to handle both string and array formats
     */
    normalizeContent(content) {
        if (typeof content === 'string') {
            return content;
        }
        if (Array.isArray(content)) {
            return content.map(block => {
                if (typeof block === 'string') {
                    return { type: 'text', text: block };
                }
                return block;
            });
        }
        return content;
    }
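    /*
     * Example (illustrative): normalizeContent() leaves strings and block arrays
     * largely as-is, only wrapping bare strings found inside an array, e.g.
     *
     *   normalizeContent('hello')                                      // => 'hello'
     *   normalizeContent(['hello', { type: 'text', text: 'world' }])
     *   // => [{ type: 'text', text: 'hello' }, { type: 'text', text: 'world' }]
     */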
}
exports.OpenAIInputProcessor = OpenAIInputProcessor;
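/*
 * Usage sketch (assumed integration, not part of the compiled output): the
 * processor is typically instantiated by the input layer and consulted before
 * the request is handed to the Enhanced Routing Engine, along the lines of
 *
 *   const { OpenAIInputProcessor } = require('./processor');
 *   const processor = new OpenAIInputProcessor();
 *
 *   async function handleIncoming(body) {
 *       if (!processor.canProcess(body)) {
 *           return null; // let another input processor claim the request
 *       }
 *       return processor.process(body); // normalized base request for the routing layer
 *   }
 *
 * `handleIncoming` and the require path are hypothetical; the real wiring lives
 * in the server and input-layer code outside this file.
 */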
//# sourceMappingURL=processor.js.map