route-claudecode — Advanced routing and transformation system for Claude Code
outputs to multiple AI providers.
250 lines • 9.97 kB • JavaScript
"use strict";
/**
 * Unified streaming-handling base classes.
 * Eliminates duplicated streaming-processing logic across providers.
 *
 * Follows the zero-hardcoding, zero-fallback, zero-silent-failure principles.
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAIStreamingHandler = exports.BaseStreamingHandler = void 0;
// Project-local dependencies: structured logger and streaming-chunk validation.
const logger_1 = require("@/utils/logger");
const response_validation_1 = require("./response-validation");
/**
 * Base streaming handler.
 * Provides a unified framework for processing streaming requests.
 */
class BaseStreamingHandler {
    options;
    metrics;
    /**
     * @param {Object} options - handler configuration. `providerName` names the
     *   upstream provider (used in logs), optional `sessionManager` receives
     *   request-lifecycle callbacks, and `enableValidation` (default: true)
     *   toggles per-chunk and whole-stream validation.
     */
    constructor(options) {
        this.options = {
            enableValidation: true,
            ...options
        };
        this.metrics = {
            chunkCount: 0,
            hasValidContent: false,
            startTime: Date.now()
        };
    }
    /**
     * Main streaming entry point.
     *
     * Delegates chunk production to the subclass `processStream()`, validating
     * and metering each chunk on the way through. On success the session
     * manager (when configured) is notified of completion; on failure the
     * error is logged loudly and rethrown — never swallowed.
     *
     * @param {Object} request - provider request; `request.metadata` may carry
     *   requestId / sessionId / conversationId used for tracking.
     * @yields chunks from `processStream()`, unchanged.
     * @throws any error raised by the underlying stream, after logging it.
     */
    async *handleStreamRequest(request) {
        const requestId = request.metadata?.requestId || 'unknown';
        const sessionId = request.metadata?.sessionId;
        const conversationId = request.metadata?.conversationId;
        this.resetMetrics();
        // Derive a session-scoped request id when session information is present.
        let trackingRequestId = requestId;
        if (sessionId && conversationId && this.options.sessionManager) {
            trackingRequestId = this.options.sessionManager.generateRequestId(sessionId, conversationId, true);
            request.metadata = { ...request.metadata, requestId: trackingRequestId, originalRequestId: requestId };
        }
        logger_1.logger.debug(`Processing streaming request with ${this.options.providerName}`, {
            originalRequestId: requestId,
            requestId: trackingRequestId,
            sessionId,
            conversationId,
            provider: this.options.providerName
        }, trackingRequestId, 'provider');
        try {
            // Delegate to the provider-specific streaming implementation.
            for await (const chunk of this.processStream(request)) {
                this.metrics.chunkCount++;
                // 🚨 Validate each chunk (when enabled).
                if (this.options.enableValidation) {
                    this.validateChunk(chunk, trackingRequestId);
                }
                // Track whether the stream carried any meaningful content.
                if (this.isValidContentChunk(chunk)) {
                    this.metrics.hasValidContent = true;
                }
                // Capture the finish reason as soon as one appears.
                const finishReason = this.extractFinishReason(chunk);
                if (finishReason) {
                    this.metrics.finishReason = finishReason;
                }
                yield chunk;
            }
            this.metrics.endTime = Date.now();
            // 🚨 Validate the stream as a whole (when enabled).
            if (this.options.enableValidation) {
                this.validateStreamOutput(trackingRequestId);
            }
            // Mark the session request as complete.
            if (sessionId && conversationId && this.options.sessionManager) {
                this.options.sessionManager.completeRequest(trackingRequestId, this.metrics.finishReason || 'stream_end');
            }
            logger_1.logger.debug('Streaming request completed successfully', {
                chunkCount: this.metrics.chunkCount,
                hasValidContent: this.metrics.hasValidContent,
                finishReason: this.metrics.finishReason,
                duration: this.metrics.endTime - this.metrics.startTime,
                requestId: trackingRequestId
            }, trackingRequestId, 'provider');
        }
        catch (error) {
            this.metrics.endTime = Date.now();
            // 🚨 Guarantee no silent failure: log loudly before rethrowing.
            this.logStreamingError(error, trackingRequestId);
            // Mark the session request as failed.
            if (sessionId && conversationId && this.options.sessionManager) {
                this.options.sessionManager.failRequest(trackingRequestId, error);
            }
            throw error;
        }
    }
    /**
     * Non-streaming request handling, implemented on top of the streaming path.
     *
     * BUGFIX: the previous implementation `break`-ed out of the `for await`
     * loop as soon as the end chunk was seen. Breaking injects a `return()`
     * into the `handleStreamRequest` async generator, so its post-loop logic
     * (endTime, stream validation, `sessionManager.completeRequest`) never
     * ran. The stream is now drained fully and the complete response is
     * assembled afterwards.
     *
     * @param {Object} request - same shape as for `handleStreamRequest`.
     * @returns {Promise<Object>} the complete (non-streaming) response.
     * @throws when the stream ends without an end chunk, or on any stream error.
     */
    async handleNonStreamRequest(request) {
        const requestId = request.metadata?.requestId || 'unknown';
        logger_1.logger.debug(`Converting non-streaming request to streaming for ${this.options.providerName}`, {
            requestId,
            provider: this.options.providerName
        }, requestId, 'provider');
        // 🎯 Collect the complete response through the streaming pipeline.
        const chunks = [];
        let sawStreamEnd = false;
        try {
            for await (const chunk of this.handleStreamRequest(request)) {
                chunks.push(chunk);
                if (this.isStreamEndChunk(chunk)) {
                    sawStreamEnd = true;
                }
            }
            const finalResponse = sawStreamEnd
                ? await this.buildCompleteResponseFromStream(chunks, request)
                : null;
            if (!finalResponse) {
                throw new Error('Failed to build complete response from stream');
            }
            logger_1.logger.debug('Successfully converted streaming response to complete response', {
                requestId,
                chunks: chunks.length,
                stopReason: finalResponse.stop_reason
            }, requestId, 'provider');
            return finalResponse;
        }
        catch (error) {
            logger_1.logger.error(`${this.options.providerName} non-streaming request failed`, {
                error: error instanceof Error ? error.message : String(error),
                provider: this.options.providerName,
                requestId
            }, requestId, 'provider');
            throw error;
        }
    }
    /**
     * Reset per-request metrics. Called at the start of every stream.
     */
    resetMetrics() {
        this.metrics = {
            chunkCount: 0,
            hasValidContent: false,
            startTime: Date.now()
        };
    }
    /**
     * Validate a single chunk via the shared response-validation helper.
     */
    validateChunk(chunk, requestId) {
        (0, response_validation_1.validateStreamingChunk)(chunk, requestId, this.options.providerName, this.metrics.chunkCount);
    }
    /**
     * Validate the stream as a whole. Currently only warns on empty streams.
     * Consistency fix: pass `requestId, 'provider'` positionally, matching
     * every other logger call in this class.
     */
    validateStreamOutput(requestId) {
        if (this.metrics.chunkCount === 0) {
            logger_1.logger.warn('Streaming completed with no chunks received', {
                requestId,
                providerName: this.options.providerName
            }, requestId, 'provider');
        }
    }
    /**
     * Log a streaming failure loudly to stderr. Deliberately bypasses the
     * structured logger so failures stay visible even if logging is broken.
     */
    logStreamingError(error, requestId) {
        console.error(`🚨 [${this.options.providerName}] STREAMING REQUEST FAILED - NO SILENT FAILURE:`);
        console.error(` Request ID: ${requestId}`);
        console.error(` Chunks Processed: ${this.metrics.chunkCount}`);
        console.error(` Had Valid Content: ${this.metrics.hasValidContent}`);
        console.error(` Duration: ${this.metrics.endTime ? this.metrics.endTime - this.metrics.startTime : 'unknown'}ms`);
        console.error(` Error: ${error instanceof Error ? error.message : String(error)}`);
        console.error(` Provider: ${this.options.providerName}`);
        console.error(` RESULT: Throwing error to client`);
    }
    /**
     * @returns {Object} a shallow copy of the current metrics snapshot.
     */
    getMetrics() {
        return { ...this.metrics };
    }
}
exports.BaseStreamingHandler = BaseStreamingHandler;
/**
 * OpenAI-specific streaming handler.
 */
class OpenAIStreamingHandler extends BaseStreamingHandler {
    openaiClient;
    transformer;
    /**
     * @param options - base handler options (provider name, session manager, validation flag).
     * @param openaiClient - OpenAI SDK client used to create chat completions.
     * @param transformer - converter between the base request/response format and OpenAI's.
     */
    constructor(options, openaiClient, transformer) {
        super(options);
        this.openaiClient = openaiClient;
        this.transformer = transformer;
    }
    /**
     * Stream a chat completion from OpenAI, re-emitting it as Anthropic-style
     * SSE chunks. The request is converted by the transformer, sent with
     * `stream: true`, and the resulting OpenAI stream is translated
     * chunk-by-chunk before being yielded.
     */
    async *processStream(request) {
        const reqId = request.metadata?.requestId || 'unknown';
        try {
            // 🔄 Convert the base request into OpenAI's wire format, forcing streaming mode.
            const transformed = this.transformer.transformBaseRequestToOpenAI(request);
            const openaiRequest = { ...transformed, stream: true };
            const toolCount = openaiRequest.tools ? openaiRequest.tools.length : 0;
            logger_1.logger.debug('Sending streaming request to OpenAI', {
                model: openaiRequest.model,
                hasTools: toolCount > 0,
                messageCount: openaiRequest.messages.length,
                requestId: reqId
            }, reqId, 'provider');
            // 🎯 The raw OpenAI API call; response translation is fully delegated.
            const stream = await this.openaiClient.chat.completions.create(openaiRequest);
            // 🔄 Translate the OpenAI stream into Anthropic SSE events.
            yield* this.transformer.transformOpenAIStreamToAnthropicSSE(stream, request, reqId);
        }
        catch (error) {
            logger_1.logger.error('OpenAI streaming request failed', {
                error: error instanceof Error ? error.message : String(error),
                provider: this.options.providerName,
                model: request.model
            }, reqId, 'provider');
            throw error;
        }
    }
    /**
     * Assembling a complete response from collected SSE chunks is not
     * supported here; the transformer owns that logic.
     * @throws always.
     */
    async buildCompleteResponseFromStream(chunks, originalRequest) {
        throw new Error('buildCompleteResponseFromStream not implemented for OpenAI - should use transformer');
    }
    /**
     * A chunk counts as "valid content" when it starts a message or carries
     * content-block data.
     */
    isValidContentChunk(chunk) {
        const event = chunk?.event;
        return event === 'content_block_delta'
            || event === 'content_block_start'
            || event === 'message_start';
    }
    /**
     * The stream ends on an explicit `message_stop`, or on a `message_delta`
     * carrying a stop reason. (The second branch intentionally returns the
     * truthy stop-reason value itself, matching how callers use it.)
     */
    isStreamEndChunk(chunk) {
        if (chunk?.event === 'message_stop') {
            return true;
        }
        return chunk?.event === 'message_delta' && chunk?.data?.delta?.stop_reason;
    }
    /**
     * Pull the stop reason out of a `message_delta` chunk, if present.
     */
    extractFinishReason(chunk) {
        const stopReason = chunk?.data?.delta?.stop_reason;
        if (chunk?.event === 'message_delta' && stopReason) {
            return stopReason;
        }
        return undefined;
    }
}
// CommonJS re-export of the OpenAI handler.
exports.OpenAIStreamingHandler = OpenAIStreamingHandler;
//# sourceMappingURL=streaming-handler.js.map