codecrucible-synth
Production-Ready AI Development Platform with Multi-Voice Synthesis, Smithery MCP Integration, Enterprise Security, and Zero-Timeout Reliability
TypeScript
/**
 * StreamingManager - Extracted from UnifiedModelClient
 * Handles all streaming-related functionality following the Living Spiral methodology
 *
 * Council Perspectives Applied:
 * - Maintainer: Clean interfaces and clear separation of concerns
 * - Performance Engineer: Optimized streaming with backpressure handling
 * - Security Guardian: Safe token handling and resource cleanup
 * - Explorer: Extensible design for future streaming patterns
 */
import { EventEmitter } from 'events';
export interface StreamChunk {
    type:
        | 'stream-start'
        | 'text-start'
        | 'text-delta'
        | 'text-end'
        | 'reasoning-start'
        | 'reasoning-delta'
        | 'reasoning-end'
        | 'tool-input-start'
        | 'tool-input-delta'
        | 'tool-input-end'
        | 'tool-call'
        | 'tool-result'
        | 'finish'
        | 'error';
    id?: string;
    timestamp: number;
    warnings?: StreamWarning[];
    delta?: string;
    toolCallId?: string;
    toolName?: string;
    args?: unknown;
    result?: unknown;
    usage?: StreamUsage;
    finishReason?: 'stop' | 'length' | 'content-filter' | 'tool-calls' | 'error';
    error?: string;
    errorCode?: string;
    providerMetadata?: Record<string, unknown>;
    content?: string;
    finished?: boolean;
    metadata?: Record<string, any>;
}
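A minimal sketch of an onChunk handler over the StreamChunk union above; the import path and the handling choices are assumptions for illustration, not part of this declaration file.

import type { StreamChunk } from './streaming-manager.js';

// Accumulates visible text, logs tool results, and surfaces stream errors.
function handleChunk(chunk: StreamChunk, textBuffer: string[]): void {
    switch (chunk.type) {
        case 'text-delta':
            if (chunk.delta) textBuffer.push(chunk.delta);
            break;
        case 'tool-result':
            console.log(`tool ${chunk.toolName ?? 'unknown'} returned`, chunk.result);
            break;
        case 'finish':
            console.log('finish reason:', chunk.finishReason, 'usage:', chunk.usage);
            break;
        case 'error':
            throw new Error(chunk.error ?? 'unknown stream error');
    }
}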
export interface StreamWarning {
    type: string;
    message: string;
    code?: string;
}
export interface StreamUsage {
    inputTokens?: number;
    outputTokens?: number;
    totalTokens?: number;
    cacheReadTokens?: number;
    cacheWriteTokens?: number;
}
export interface StreamBlock {
    id: string;
    type: 'text' | 'reasoning' | 'tool-input' | 'tool-call';
    startTime: number;
    endTime?: number;
    content: string[];
    metadata?: Record<string, any>;
}
export interface StreamToken {
    content: string;
    timestamp: number;
    index: number;
    finished?: boolean;
    metadata?: Record<string, any>;
}
export interface StreamConfig {
    chunkSize?: number;
    bufferSize?: number;
    enableBackpressure?: boolean;
    timeout?: number;
    encoding?: BufferEncoding;
    enableReasoningStream?: boolean;
    enableToolStreaming?: boolean;
    maxRetries?: number;
    enableProviderMetadata?: boolean;
    enableLifecycleEvents?: boolean;
}
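A possible StreamConfig, sketched only to show the available knobs; the values and the field semantics noted in the comments are inferred from the names, not documented behavior.

import type { StreamConfig } from './streaming-manager.js';

const streamConfig: StreamConfig = {
    chunkSize: 16,              // assumed: tokens emitted per chunk
    bufferSize: 1024,           // assumed: buffered tokens before backpressure applies
    enableBackpressure: true,
    timeout: 30_000,            // assumed: milliseconds before a stalled stream aborts
    encoding: 'utf8',
    enableReasoningStream: true,
    enableToolStreaming: true,
    maxRetries: 2,
    enableProviderMetadata: false,
    enableLifecycleEvents: true,
};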
export interface StreamMetrics {
    tokensStreamed: number;
    streamDuration: number;
    averageLatency: number;
    throughput: number;
    backpressureEvents: number;
}
export interface StreamSession {
    id: string;
    startTime: number;
    tokens: StreamToken[];
    chunks: StreamChunk[];
    activeBlocks: Map<string, StreamBlock>;
    metrics: StreamMetrics;
    isActive: boolean;
    status: 'active' | 'completed' | 'error' | 'cancelled';
}
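A short sketch of inspecting a session and its metrics after a stream has run; how a stream is bound to a session is not spelled out in this file, so that step is left as a placeholder, and the millisecond units are an assumption based on the field names.

import { createStreamingManager } from './streaming-manager.js';

const manager = createStreamingManager();
const session = manager.createSession();
// ... run a stream associated with session.id here ...
const metrics = manager.getStreamMetrics(session.id);
if (metrics) {
    console.log(`tokens streamed: ${metrics.tokensStreamed}`);
    console.log(`duration: ${metrics.streamDuration} ms, avg latency: ${metrics.averageLatency} ms`);
    console.log(`backpressure events: ${metrics.backpressureEvents}`);
}
manager.destroySession(session.id);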
export interface IStreamingManager {
    startStream(content: string, onToken: (token: StreamToken) => void, config?: StreamConfig): Promise<string>;
    startModernStream(content: string, onChunk: (chunk: StreamChunk) => void, config?: StreamConfig): Promise<string>;
    streamToolExecution(toolName: string, args: unknown, onChunk: (chunk: StreamChunk) => void): Promise<unknown>;
    createSession(sessionId?: string): StreamSession;
    getSession(sessionId: string): StreamSession | undefined;
    destroySession(sessionId: string): void;
    getStreamMetrics(sessionId: string): StreamMetrics | undefined;
    getAllMetrics(): Map<string, StreamMetrics>;
    updateConfig(config: Partial<StreamConfig>): void;
    getConfig(): StreamConfig;
    cleanup(): Promise<void>;
}
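A minimal usage sketch against the IStreamingManager contract, assuming the factory declared below returns the default implementation; the declaration alone does not say whether the resolved string is the aggregated output or a stream identifier, so that interpretation is an assumption.

import { createStreamingManager } from './streaming-manager.js';

async function streamOnce(): Promise<void> {
    const streaming = createStreamingManager({ enableBackpressure: true, chunkSize: 8 });
    // Token-by-token delivery: onToken fires for each StreamToken until the stream finishes.
    const result = await streaming.startStream('Describe the repository layout', token => {
        process.stdout.write(token.content);
    });
    console.log(`\nstream complete, resolved value: ${result.length} characters`);
    await streaming.cleanup();
}

streamOnce().catch(console.error);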
/**
 * StreamingManager Implementation
 * Follows the Single Responsibility Principle - handles only streaming concerns
 */
export declare class StreamingManager extends EventEmitter implements IStreamingManager {
    private config;
    private sessions;
    private activeStreams;
    private defaultConfig;
    constructor(config?: Partial<StreamConfig>);
    /**
     * Set up event handlers for stream monitoring
     */
    private setupEventHandlers;
    /**
     * Enhanced: Start modern streaming with AI SDK v5.0 lifecycle patterns
     */
    startModernStream(content: string, onChunk: (chunk: StreamChunk) => void, config?: StreamConfig): Promise<string>;
    /**
     * Enhanced: Stream tool execution with proper lifecycle
     */
    streamToolExecution(toolName: string, args: unknown, onChunk: (chunk: StreamChunk) => void): Promise<unknown>;
    /**
     * Start streaming content with token-by-token delivery
     * Core streaming method with comprehensive error handling
     */
    startStream(content: string, onToken: (token: StreamToken) => void, config?: StreamConfig): Promise<string>;
    /**
     * Create a new streaming session
     */
    createSession(sessionId?: string): StreamSession;
    /**
     * Get an existing session
     */
    getSession(sessionId: string): StreamSession | undefined;
    /**
     * Destroy a streaming session and clean up its resources
     */
    destroySession(sessionId: string): void;
    /**
     * Get metrics for a specific session
     */
    getStreamMetrics(sessionId: string): StreamMetrics | undefined;
    /**
     * Get all session metrics
     */
    getAllMetrics(): Map<string, StreamMetrics>;
    /**
     * Update streaming configuration
     */
    updateConfig(config: Partial<StreamConfig>): void;
    /**
     * Get current configuration
     */
    getConfig(): StreamConfig;
    /**
     * Clean up all sessions and resources
     */
    cleanup(): Promise<void>;
    /**
     * Private: Update metrics for a streaming session
     */
    private updateStreamMetrics;
    /**
     * Private: Handle backpressure by introducing controlled delays
     */
    private handleBackpressure;
    /**
     * Private: Tokenize content into chunks for streaming
     */
    private tokenizeContent;
    /**
     * Enhanced: Generate unique stream ID for AI SDK v5.0 compatibility
     */
    private generateStreamId;
    /**
     * Enhanced: Generate unique block ID for streaming blocks
     */
    private generateBlockId;
    /**
     * Private: Generate unique session ID
     */
    private generateSessionId;
}
export declare function createStreamingManager(config?: Partial<StreamConfig>): IStreamingManager;
export default StreamingManager;
//# sourceMappingURL=streaming-manager.d.ts.map
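An end-to-end sketch combining the AI SDK v5.0-style lifecycle stream with tool streaming via the default StreamingManager export; the tool name, its arguments, and the import path are hypothetical placeholders rather than documented values.

import StreamingManager from './streaming-manager.js';

async function main(): Promise<void> {
    const manager = new StreamingManager({ enableToolStreaming: true, enableLifecycleEvents: true });

    // Lifecycle-aware streaming: text deltas are printed as they arrive, the finish chunk is logged.
    await manager.startModernStream('Summarize recent changes', chunk => {
        if (chunk.type === 'text-delta' && chunk.delta) process.stdout.write(chunk.delta);
        if (chunk.type === 'finish') console.log('\nfinish reason:', chunk.finishReason);
    });

    // Hypothetical tool invocation: 'read_file' and its args are placeholders, not a documented tool.
    const toolResult = await manager.streamToolExecution('read_file', { path: 'README.md' }, chunk => {
        if (chunk.type === 'tool-result') console.log('tool result chunk received');
    });
    console.log('tool result:', toolResult);

    await manager.cleanup();
}

main().catch(console.error);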