agent-workflow

A powerful workflow engine supporting DAG (Directed Acyclic Graph) task scheduling, dynamic task generation, and intelligent strategy systems.

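The bundled sources below expose a fluent WorkflowBuilder plus a DAGTask base class whose dependsOn list drives the DAG scheduling. A minimal usage sketch, assuming the package entry re-exports WorkflowBuilder, DAGTask and TaskInput as named exports and that TaskInput is a plain key/value record (FetchTask and TransformTask are illustrative):

import { WorkflowBuilder, DAGTask, type TaskInput } from 'agent-workflow';

// Illustrative task: returns data that the engine merges into the shared context.
class FetchTask extends DAGTask {
  name = 'fetch';
  async execute(_input: TaskInput) {
    return { rawData: [1, 2, 3] };
  }
}

// Illustrative task that depends on FetchTask via the dependency list
// passed to the DAGTask constructor.
class TransformTask extends DAGTask {
  name = 'transform';
  async execute(input: TaskInput) {
    const raw = (input.rawData as number[]) ?? [];
    return { transformed: raw.map((n) => n * 2) };
  }
}

const fetchTask = new FetchTask();
const transformTask = new TransformTask([fetchTask]);

const workflow = WorkflowBuilder.create()
  .withRetry(3)          // sets retryAttempts in WorkflowConfig
  .withTimeout(30_000)   // sets timeoutMs in WorkflowConfig
  .addTasks([fetchTask, transformTask])
  .build();

const result = await workflow.execute({ userId: 42 });
if (result.success) {
  console.log(result.data, result.executionTime, result.taskResults);
}
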
{"version":3,"file":"agent-workflow.umd.cjs","sources":["../src/workflow/ContextManager.ts","../src/workflow/WorkflowBuilder.ts","../src/workflow/TaskRegistry.ts"],"sourcesContent":["//上下文管理器\n// 上下文用于任务之间的数据共享,并动态更新\nimport type { TaskInput } from \"./Task\";\n\nexport class ContextManager {\n private context: TaskInput = {};\n\n // 获取上下文中的数据\n // biome-ignore lint/suspicious/noExplicitAny: <explanation>\n get(key: string): any {\n return this.context[key];\n }\n\n // 设置上下文中的数据\n // biome-ignore lint/suspicious/noExplicitAny: <explanation>\n set(key: string, value: any): void {\n this.context[key] = value;\n }\n\n // 获取完整上下文\n getAll(): TaskInput {\n return this.context;\n }\n\n // 清除上下文\n clear(): void {\n this.context = {};\n }\n}\n","import { ContextManager } from './ContextManager';\nimport type { TaskInput } from './Task';\n\n// DAG任务抽象基类定义 - 确保依赖关系一致性\nexport abstract class DAGTask {\n abstract name: string;\n public dependsOn: DAGTask[] = [];\n\n constructor(dependencies: DAGTask[] = []) {\n this.dependsOn = dependencies;\n }\n\n abstract execute(input: TaskInput): Promise<Record<string, any>>;\n}\n\n// 🔄 流式DAG任务抽象类(扩展)\nexport abstract class StreamingDAGTask extends DAGTask {\n executeStream?(\n input: TaskInput\n ): AsyncGenerator<StreamChunk, Record<string, any>, unknown>;\n isStreaming?: boolean;\n}\n\n// 🌊 流式数据块\nexport interface StreamChunk {\n type: 'progress' | 'data' | 'error' | 'complete';\n taskName: string;\n content?: any;\n progress?: number;\n timestamp: number;\n metadata?: Record<string, any>;\n}\n\n// 🔄 流式工作流结果\nexport interface StreamingWorkflowResult {\n stream: AsyncGenerator<StreamChunk, WorkflowResult, unknown>;\n getResult(): Promise<WorkflowResult>;\n}\n\n// 核心配置接口\nexport interface WorkflowConfig {\n enableStreaming?: boolean;\n retryAttempts?: number;\n timeoutMs?: number;\n maxDynamicSteps?: number;\n}\n\n// 统一的工作流上下文\nexport interface WorkflowContext {\n readonly data: Record<string, unknown>;\n get<T>(key: string): T | undefined;\n set<T>(key: string, value: T): void;\n getAll(): Record<string, unknown>;\n clear(): void;\n getExecutionHistory(): TaskExecutionResult[];\n getLastResult(): any;\n}\n\n// 任务执行结果\nexport interface TaskExecutionResult {\n taskName: string;\n status: 'completed' | 'failed' | 'skipped';\n output?: any;\n error?: string;\n duration: number;\n timestamp: number;\n}\n\n// 工作流执行结果\nexport interface WorkflowResult<T = any> {\n success: boolean;\n data?: T;\n error?: Error;\n executionTime: number;\n taskResults: Map<string, TaskExecutionResult>;\n dynamicTasksGenerated?: number;\n totalSteps?: number;\n}\n\n// 动态任务生成策略\nexport interface DynamicStrategy {\n name: string;\n condition: (context: WorkflowContext, result?: any) => boolean;\n generator: (context: WorkflowContext) => Promise<DAGTask[]>;\n priority?: number;\n once?: boolean;\n}\n\n// 🤖 AI SDK 兼容的流式任务抽象类\nexport abstract class AISDKStreamingTask extends DAGTask {\n executeStreamAI?(input: TaskInput): Promise<{\n textStream?: AsyncIterable<string>;\n fullStream?: AsyncIterable<any>;\n toDataStreamResponse?: () => Response;\n toReadableStream?: () => ReadableStream;\n }>;\n isAISDKStreaming?: boolean;\n}\n\n// 🌊 AI SDK 流式工作流结果\nexport interface AISDKStreamingWorkflowResult {\n textStream: AsyncIterable<string>;\n fullStream: AsyncIterable<any>;\n toDataStreamResponse(): Response;\n toReadableStream(): ReadableStream;\n getResult(): Promise<WorkflowResult>;\n}\n\n// 主要的工作流构建器\nexport class WorkflowBuilder {\n private config: WorkflowConfig = {};\n private staticTasks: 
DAGTask[] = [];\n\n private dynamicStrategies: DynamicStrategy[] = [];\n\n // 🔒 私有构造函数 - 防止直接实例化\n private constructor() {}\n\n // 🏭 工厂方法 - 唯一创建实例的方式\n static create(): WorkflowBuilder {\n return new WorkflowBuilder();\n }\n\n // 配置方法\n withConfig(config: Partial<WorkflowConfig>): this {\n this.config = { ...this.config, ...config };\n return this;\n }\n\n withRetry(attempts: number): this {\n this.config.retryAttempts = attempts;\n return this;\n }\n\n withTimeout(timeoutMs: number): this {\n this.config.timeoutMs = timeoutMs;\n return this;\n }\n\n // 静态任务构建\n addTask(task: DAGTask): this {\n this.staticTasks.push(task);\n return this;\n }\n\n addTasks(tasks: DAGTask[]): this {\n this.staticTasks.push(...tasks);\n return this;\n }\n\n // 添加动态策略\n addDynamicStrategy(strategy: DynamicStrategy): this {\n this.dynamicStrategies.push(strategy);\n return this;\n }\n\n // 条件任务生成 - 语法糖\n whenCondition(\n condition: (context: WorkflowContext) => boolean,\n taskGenerator: (context: WorkflowContext) => Promise<DAGTask[]>\n ): this {\n return this.addDynamicStrategy({\n name: `condition-${this.dynamicStrategies.length}`,\n condition: (context) => {\n // Wait for all current tasks to complete before checking conditions\n const history = context.getExecutionHistory();\n const completedTasks = history.filter((h) => h.status === 'completed');\n\n // If there are completed tasks, check the condition\n if (completedTasks.length > 0) {\n return condition(context);\n }\n return false;\n },\n generator: taskGenerator,\n once: true,\n });\n }\n\n // 基于结果的任务生成\n onTaskComplete(\n taskName: string,\n resultProcessor: (\n result: any,\n context: WorkflowContext\n ) => Promise<DAGTask[]>\n ): this {\n return this.addDynamicStrategy({\n name: `on-complete-${taskName}`,\n condition: (context) => {\n const history = context.getExecutionHistory();\n return history.some(\n (h) => h.taskName === taskName && h.status === 'completed'\n );\n },\n generator: async (context) => {\n const history = context.getExecutionHistory();\n const taskResult = history.find((h) => h.taskName === taskName);\n return resultProcessor(taskResult?.output, context);\n },\n once: true,\n });\n }\n\n // 基于上下文变化的任务生成\n onContextChange(\n contextKey: string,\n taskGenerator: (value: any, context: WorkflowContext) => Promise<DAGTask[]>\n ): this {\n return this.addDynamicStrategy({\n name: `on-context-${contextKey}`,\n condition: (context) => {\n // Check if there's a new context value that hasn't been triggered before\n const value = context.get(contextKey);\n return value !== undefined;\n },\n generator: async (context) => {\n const value = context.get(contextKey);\n return taskGenerator(value, context);\n },\n once: true,\n });\n }\n\n // 构建工作流实例\n build(): Workflow {\n if (this.dynamicStrategies.length > 0) {\n return new StrategyBasedWorkflow(\n this.config,\n this.staticTasks,\n this.dynamicStrategies\n );\n }\n\n return new StaticWorkflow(this.config, this.staticTasks);\n }\n\n // 🌊 构建流式工作流实例\n buildStreaming(): StreamingWorkflow {\n this.config.enableStreaming = true;\n\n if (this.dynamicStrategies.length > 0) {\n return new StreamingStrategyBasedWorkflow(\n this.config,\n this.staticTasks,\n this.dynamicStrategies\n );\n }\n\n return new StreamingStaticWorkflow(this.config, this.staticTasks);\n }\n\n // 🤖 构建AI SDK兼容的流式工作流\n buildAISDKStreaming(): AISDKStreamingWorkflow {\n this.config.enableStreaming = true;\n\n if (this.dynamicStrategies.length > 0) {\n return new AISDKStreamingStrategyWorkflow(\n this.config,\n this.staticTasks,\n 
this.dynamicStrategies\n );\n }\n\n return new AISDKStreamingStaticWorkflow(this.config, this.staticTasks);\n }\n}\n\n// 工作流接口\nexport interface Workflow {\n execute(input?: TaskInput): Promise<WorkflowResult>;\n executeStream?(input?: TaskInput): StreamingWorkflowResult;\n getContext(): WorkflowContext;\n getResults(): Map<string, TaskExecutionResult>;\n}\n\n// 🌊 流式工作流接口\nexport interface StreamingWorkflow extends Workflow {\n executeStream(input?: TaskInput): StreamingWorkflowResult;\n}\n\n// 🤖 AI SDK 兼容的流式工作流接口\nexport interface AISDKStreamingWorkflow extends Workflow {\n executeStreamAISDK(input?: TaskInput): AISDKStreamingWorkflowResult;\n}\n\n// 增强的上下文实现\nclass EnhancedWorkflowContext implements WorkflowContext {\n private internalContext: ContextManager;\n private executionHistory: TaskExecutionResult[] = [];\n\n constructor() {\n this.internalContext = new ContextManager();\n }\n\n get data(): Record<string, unknown> {\n return this.internalContext.getAll();\n }\n\n get<T>(key: string): T | undefined {\n return this.internalContext.get(key);\n }\n\n set<T>(key: string, value: T): void {\n this.internalContext.set(key, value);\n }\n\n getAll(): Record<string, unknown> {\n return this.internalContext.getAll();\n }\n\n clear(): void {\n this.internalContext.clear();\n this.executionHistory = [];\n }\n\n getExecutionHistory(): TaskExecutionResult[] {\n return [...this.executionHistory];\n }\n\n getLastResult(): any {\n const lastExecution =\n this.executionHistory[this.executionHistory.length - 1];\n return lastExecution?.output;\n }\n\n addExecutionResult(result: TaskExecutionResult): void {\n this.executionHistory.push(result);\n }\n\n // 内部方法,用于访问原始ContextManager\n getInternalContext(): ContextManager {\n return this.internalContext;\n }\n}\n\n// 基础工作流类\nabstract class BaseWorkflow {\n protected context: EnhancedWorkflowContext;\n protected config: WorkflowConfig;\n protected startTime = 0;\n protected taskResults: Map<string, TaskExecutionResult> = new Map();\n\n constructor(config: WorkflowConfig) {\n this.config = config;\n this.context = new EnhancedWorkflowContext();\n }\n\n abstract execute(input?: TaskInput): Promise<WorkflowResult>;\n\n getContext(): WorkflowContext {\n return this.context;\n }\n\n getResults(): Map<string, TaskExecutionResult> {\n return new Map(this.taskResults);\n }\n\n protected async executeTask(task: DAGTask): Promise<void> {\n const taskStartTime = Date.now();\n\n try {\n // 直接执行任务\n const input = this.context.getAll();\n const output = await task.execute(input);\n\n // 将任务输出存储在任务名称下\n const taskName = task.name || '';\n this.context.set(taskName, output);\n\n // 也将输出的各个字段直接存储到上下文(保持兼容性)\n for (const [key, value] of Object.entries(output)) {\n this.context.set(key, value);\n }\n\n // 生成唯一的任务键,处理重复名称\n let uniqueKey = taskName;\n let counter = 1;\n while (this.taskResults.has(uniqueKey)) {\n uniqueKey = `${taskName}_${counter}`;\n counter++;\n }\n\n const result: TaskExecutionResult = {\n taskName: taskName,\n status: 'completed',\n output: output,\n duration: Date.now() - taskStartTime,\n timestamp: Date.now(),\n };\n\n this.taskResults.set(uniqueKey, result);\n this.context.addExecutionResult(result);\n } catch (error) {\n // 生成唯一的任务键,处理重复名称\n const taskName = task.name || '';\n let uniqueKey = taskName;\n let counter = 1;\n while (this.taskResults.has(uniqueKey)) {\n uniqueKey = `${taskName}_${counter}`;\n counter++;\n }\n\n const result: TaskExecutionResult = {\n taskName: taskName,\n status: 'failed',\n error: error instanceof Error ? 
error.message : String(error),\n duration: Date.now() - taskStartTime,\n timestamp: Date.now(),\n };\n\n this.taskResults.set(uniqueKey, result);\n this.context.addExecutionResult(result);\n throw error;\n }\n }\n}\n\n// 静态工作流实现\nclass StaticWorkflow extends BaseWorkflow implements Workflow {\n protected tasks: DAGTask[];\n\n constructor(config: WorkflowConfig, tasks: DAGTask[]) {\n super(config);\n this.tasks = tasks;\n }\n\n async execute(input: TaskInput = {}): Promise<WorkflowResult> {\n this.startTime = Date.now();\n\n try {\n // Set initial input\n Object.entries(input).forEach(([key, value]) => {\n this.context.set(key, value);\n });\n\n // Execute DAG tasks\n await this.executeDAG();\n\n return {\n success: true,\n data: this.context.getAll(),\n executionTime: Date.now() - this.startTime,\n taskResults: this.taskResults,\n };\n } catch (error) {\n return {\n success: false,\n error: error as Error,\n executionTime: Date.now() - this.startTime,\n taskResults: this.taskResults,\n };\n }\n }\n\n private async executeDAG(): Promise<void> {\n const levels = this.computeExecutionLevels();\n let hasError = false;\n let lastError: Error | undefined;\n\n for (const level of levels) {\n // Execute all tasks at the current level, don't stop due to individual task failures\n const results = await Promise.allSettled(\n level.map((task) => this.executeTask(task))\n );\n\n // Check for failed tasks\n for (const result of results) {\n if (result.status === 'rejected') {\n hasError = true;\n lastError = result.reason;\n }\n }\n }\n\n // If any tasks failed, throw the last error\n if (hasError && lastError) {\n throw lastError;\n }\n }\n\n protected computeExecutionLevels(): DAGTask[][] {\n // Efficient topological sorting implementation\n const graph = new Map<DAGTask, DAGTask[]>();\n const inDegree = new Map<DAGTask, number>();\n\n // Initialize\n for (const task of this.tasks) {\n graph.set(task, []);\n inDegree.set(task, 0);\n }\n\n // Build dependency graph\n for (const task of this.tasks) {\n if (task.dependsOn) {\n for (const dep of task.dependsOn) {\n graph.get(dep)?.push(task);\n inDegree.set(task, (inDegree.get(task) || 0) + 1);\n }\n }\n }\n\n // Layered execution\n const levels: DAGTask[][] = [];\n const queue = this.tasks.filter((task) => inDegree.get(task) === 0);\n let processedCount = 0;\n\n while (queue.length > 0) {\n const currentLevel = [...queue];\n levels.push(currentLevel);\n processedCount += currentLevel.length;\n queue.length = 0;\n\n for (const task of currentLevel) {\n for (const next of graph.get(task) || []) {\n const newDegree = (inDegree.get(next) || 0) - 1;\n inDegree.set(next, newDegree);\n if (newDegree === 0) {\n queue.push(next);\n }\n }\n }\n }\n\n // Detect circular dependencies\n if (processedCount < this.tasks.length) {\n throw new Error(\n 'Circular dependency detected, unable to execute workflow'\n );\n }\n\n return levels;\n }\n}\n\n// 策略驱动的动态工作流\nclass StrategyBasedWorkflow extends BaseWorkflow implements Workflow {\n protected tasks: DAGTask[];\n protected strategies: DynamicStrategy[];\n protected dynamicTasksGenerated = 0;\n protected currentStep = 0;\n protected usedStrategies: Set<string> = new Set();\n\n constructor(\n config: WorkflowConfig,\n tasks: DAGTask[],\n strategies: DynamicStrategy[]\n ) {\n super(config);\n this.tasks = [...tasks];\n this.strategies = strategies;\n }\n\n async execute(input: TaskInput = {}): Promise<WorkflowResult> {\n this.startTime = Date.now();\n this.dynamicTasksGenerated = 0;\n this.currentStep = 0;\n 
this.usedStrategies.clear();\n\n try {\n // Set initial input\n Object.entries(input).forEach(([key, value]) => {\n this.context.set(key, value);\n });\n\n // Dynamic execution loop\n while (this.hasTasksToExecute() && this.shouldContinue()) {\n this.currentStep++;\n\n // Execute current batch of tasks\n await this.executeCurrentBatch();\n\n // Evaluate strategies and generate new tasks\n await this.evaluateStrategiesAndGenerateTasks();\n }\n\n // Check if unable to continue due to circular dependencies\n if (!this.hasTasksToExecute() && this.shouldContinue()) {\n const processedTaskNames = new Set(\n this.context\n .getExecutionHistory()\n .filter(\n (h) =>\n h.status === 'completed' ||\n h.status === 'failed' ||\n h.status === 'skipped'\n )\n .map((h) => h.taskName)\n );\n\n const unprocessedTasks = this.tasks.filter(\n (task) => !processedTaskNames.has(task.name || '')\n );\n\n if (unprocessedTasks.length > 0) {\n throw new Error(\n 'Circular dependency detected, unable to execute workflow'\n );\n }\n }\n\n return {\n success: true,\n data: this.context.getAll(),\n executionTime: Date.now() - this.startTime,\n taskResults: this.taskResults,\n dynamicTasksGenerated: this.dynamicTasksGenerated,\n totalSteps: this.currentStep,\n };\n } catch (error) {\n return {\n success: false,\n error: error as Error,\n executionTime: Date.now() - this.startTime,\n taskResults: this.taskResults,\n dynamicTasksGenerated: this.dynamicTasksGenerated,\n totalSteps: this.currentStep,\n };\n }\n }\n\n protected hasTasksToExecute(): boolean {\n const readyTasks = this.getReadyTasks();\n return readyTasks.length > 0;\n }\n\n protected shouldContinue(): boolean {\n const maxSteps = this.config.maxDynamicSteps || 50;\n return this.currentStep < maxSteps;\n }\n\n private async executeCurrentBatch(): Promise<void> {\n const readyTasks = this.getReadyTasks();\n\n if (readyTasks.length === 0) return;\n\n // Execute ready tasks in parallel, don't stop due to individual task failures\n const results = await Promise.allSettled(\n readyTasks.map((task) => this.executeTask(task))\n );\n\n // Log failed tasks but continue execution\n for (const result of results) {\n if (result.status === 'rejected') {\n console.warn('Task execution failed:', result.reason);\n }\n }\n }\n\n protected getReadyTasks(): DAGTask[] {\n const processedTaskNames = new Set(\n this.context\n .getExecutionHistory()\n .filter(\n (h) =>\n h.status === 'completed' ||\n h.status === 'failed' ||\n h.status === 'skipped'\n )\n .map((h) => h.taskName)\n );\n\n return this.tasks.filter((task) => {\n // Check if already processed (completed, failed, or skipped)\n if (processedTaskNames.has(task.name || '')) {\n return false;\n }\n\n // Check if dependencies are satisfied (as long as dependency tasks have been processed, regardless of success/failure)\n if (task.dependsOn) {\n return task.dependsOn.every((dep) =>\n processedTaskNames.has(dep.name || '')\n );\n }\n\n return true;\n });\n }\n\n protected async evaluateStrategiesAndGenerateTasks(): Promise<void> {\n // Sort strategies by priority\n const sortedStrategies = [...this.strategies].sort(\n (a, b) => (b.priority || 0) - (a.priority || 0)\n );\n\n for (const strategy of sortedStrategies) {\n // Skip already used one-time strategies\n if (strategy.once && this.usedStrategies.has(strategy.name)) {\n continue;\n }\n\n try {\n const lastResult = this.context.getLastResult();\n\n if (strategy.condition(this.context, lastResult)) {\n const newTasks = await strategy.generator(this.context);\n\n if 
(newTasks.length > 0) {\n this.tasks.push(...newTasks);\n this.dynamicTasksGenerated += newTasks.length;\n\n if (strategy.once) {\n this.usedStrategies.add(strategy.name);\n }\n\n console.log(\n `🎯 Strategy \"${strategy.name}\" generated ${newTasks.length} new tasks`\n );\n }\n }\n } catch (error) {\n console.error(`Strategy \"${strategy.name}\" execution failed:`, error);\n }\n }\n }\n}\n\n// 🌊 流式静态工作流实现\nclass StreamingStaticWorkflow\n extends StaticWorkflow\n implements StreamingWorkflow\n{\n private streamResult: WorkflowResult | undefined;\n\n executeStream(input: TaskInput = {}): StreamingWorkflowResult {\n this.streamResult = undefined; // Reset\n const stream = this.createExecutionStream(input);\n\n const resultPromise = (async (): Promise<WorkflowResult> => {\n // Consume stream until completion, generator will set streamResult\n for await (const chunk of stream) {\n // Stream processing\n }\n\n // Return streaming execution result\n return (\n this.streamResult || {\n success: false,\n error: new Error('Streaming execution not completed'),\n executionTime: 0,\n taskResults: new Map(),\n }\n );\n })();\n\n return {\n stream: this.createExecutionStream(input),\n getResult: () => resultPromise,\n };\n }\n\n private async *createExecutionStream(\n input: TaskInput\n ): AsyncGenerator<StreamChunk, WorkflowResult, unknown> {\n this.startTime = Date.now();\n\n try {\n // Set initial input\n Object.entries(input).forEach(([key, value]) => {\n this.context.set(key, value);\n });\n\n yield {\n type: 'progress',\n taskName: 'workflow',\n content: 'Workflow execution started',\n progress: 0,\n timestamp: Date.now(),\n };\n\n // Execute DAG tasks with streaming output\n yield* this.executeDAGStream();\n\n const result: WorkflowResult = {\n success: true,\n data: this.context.getAll(),\n executionTime: Date.now() - this.startTime,\n taskResults: this.taskResults,\n };\n\n // Save result for getResult usage\n this.streamResult = result;\n\n yield {\n type: 'complete',\n taskName: 'workflow',\n content: 'Workflow execution completed',\n progress: 100,\n timestamp: Date.now(),\n };\n\n return result;\n } catch (error) {\n const errorResult: WorkflowResult = {\n success: false,\n error: error as Error,\n executionTime: Date.now() - this.startTime,\n taskResults: this.taskResults,\n };\n\n // Save error result\n this.streamResult = errorResult;\n\n yield {\n type: 'error',\n taskName: 'workflow',\n content: error instanceof Error ? 
error.message : String(error),\n timestamp: Date.now(),\n };\n\n return errorResult;\n }\n }\n\n private async *executeDAGStream(): AsyncGenerator<\n StreamChunk,\n void,\n unknown\n > {\n const levels = this.computeExecutionLevels();\n const totalTasks = this.tasks.length;\n let completedTasks = 0;\n\n for (const level of levels) {\n // Execute all tasks at the current level\n const taskPromises = level.map((task) => this.executeTaskStream(task));\n\n for await (const taskStream of taskPromises) {\n for await (const chunk of taskStream) {\n yield chunk;\n if (chunk.type === 'complete') {\n completedTasks++;\n yield {\n type: 'progress',\n taskName: 'workflow',\n content: `Completed ${completedTasks}/${totalTasks} tasks`,\n progress: Math.round((completedTasks / totalTasks) * 100),\n timestamp: Date.now(),\n };\n }\n }\n }\n }\n }\n\n private async *executeTaskStream(\n task: StreamingDAGTask\n ): AsyncGenerator<StreamChunk, void, unknown> {\n const taskStartTime = Date.now();\n\n try {\n yield {\n type: 'progress',\n taskName: task.name,\n content: `Starting task execution: ${task.name}`,\n progress: 0,\n timestamp: Date.now(),\n };\n\n const input = this.context.getAll();\n let output: Record<string, any>;\n\n // Check if it's a streaming task\n if (task.isStreaming && task.executeStream) {\n const generator = task.executeStream(input);\n let finalResult: Record<string, any> = {};\n\n try {\n // Iterate generator and yield all intermediate results\n while (true) {\n const { value, done } = await generator.next();\n if (done) {\n finalResult = value || {};\n break;\n }\n // Yield streaming data\n yield value;\n }\n } catch (error) {\n // Error during streaming task execution\n throw error;\n }\n\n output = finalResult;\n } else {\n // Regular task execution\n output = await task.execute(input);\n }\n\n // Store results\n const taskName = task.name || '';\n this.context.set(taskName, output);\n\n for (const [key, value] of Object.entries(output)) {\n this.context.set(key, value);\n }\n\n // Generate unique task key\n let uniqueKey = taskName;\n let counter = 1;\n while (this.taskResults.has(uniqueKey)) {\n uniqueKey = `${taskName}_${counter}`;\n counter++;\n }\n\n const result: TaskExecutionResult = {\n taskName: taskName,\n status: 'completed',\n output: output,\n duration: Date.now() - taskStartTime,\n timestamp: Date.now(),\n };\n\n this.taskResults.set(uniqueKey, result);\n this.context.addExecutionResult(result);\n\n yield {\n type: 'complete',\n taskName: task.name,\n content: `Task completed: ${task.name}`,\n progress: 100,\n timestamp: Date.now(),\n metadata: { duration: result.duration },\n };\n } catch (error) {\n const taskName = task.name || '';\n let uniqueKey = taskName;\n let counter = 1;\n while (this.taskResults.has(uniqueKey)) {\n uniqueKey = `${taskName}_${counter}`;\n counter++;\n }\n\n const result: TaskExecutionResult = {\n taskName: taskName,\n status: 'failed',\n error: error instanceof Error ? error.message : String(error),\n duration: Date.now() - taskStartTime,\n timestamp: Date.now(),\n };\n\n this.taskResults.set(uniqueKey, result);\n this.context.addExecutionResult(result);\n\n yield {\n type: 'error',\n taskName: task.name,\n content: error instanceof Error ? 
error.message : String(error),\n timestamp: Date.now(),\n };\n }\n }\n}\n\n// 🌊 流式策略工作流实现\nclass StreamingStrategyBasedWorkflow\n extends StrategyBasedWorkflow\n implements StreamingWorkflow\n{\n private streamResult: WorkflowResult | undefined;\n\n executeStream(input: TaskInput = {}): StreamingWorkflowResult {\n this.streamResult = undefined; // Reset\n const stream = this.createDynamicExecutionStream(input);\n\n const resultPromise = (async (): Promise<WorkflowResult> => {\n // Consume stream until completion, generator will set streamResult\n for await (const chunk of stream) {\n // Stream processing\n }\n\n // Return streaming execution result\n return (\n this.streamResult || {\n success: false,\n error: new Error('Streaming execution not completed'),\n executionTime: 0,\n taskResults: new Map(),\n }\n );\n })();\n\n return {\n stream: this.createDynamicExecutionStream(input),\n getResult: () => resultPromise,\n };\n }\n\n private async *createDynamicExecutionStream(\n input: TaskInput\n ): AsyncGenerator<StreamChunk, WorkflowResult, unknown> {\n this.startTime = Date.now();\n this.dynamicTasksGenerated = 0;\n this.currentStep = 0;\n this.usedStrategies.clear();\n\n try {\n Object.entries(input).forEach(([key, value]) => {\n this.context.set(key, value);\n });\n\n yield {\n type: 'progress',\n taskName: 'workflow',\n content: 'Dynamic workflow execution started',\n progress: 0,\n timestamp: Date.now(),\n };\n\n // Dynamic execution loop\n while (this.hasTasksToExecute() && this.shouldContinue()) {\n this.currentStep++;\n\n yield {\n type: 'progress',\n taskName: 'workflow',\n content: `Executing step ${this.currentStep}`,\n progress: Math.min(\n (this.currentStep / (this.config.maxDynamicSteps || 50)) * 100,\n 90\n ),\n timestamp: Date.now(),\n };\n\n // Execute current batch of tasks\n const readyTasks = this.getReadyTasks();\n for (const task of readyTasks) {\n yield* this.executeTaskStreamForStrategy(task as StreamingDAGTask);\n }\n\n // Evaluate strategies and generate new tasks\n await this.evaluateStrategiesAndGenerateTasks();\n\n if (this.dynamicTasksGenerated > 0) {\n yield {\n type: 'data',\n taskName: 'strategy',\n content: `Dynamically generated ${this.dynamicTasksGenerated} new tasks`,\n timestamp: Date.now(),\n };\n }\n }\n\n const result: WorkflowResult = {\n success: true,\n data: this.context.getAll(),\n executionTime: Date.now() - this.startTime,\n taskResults: this.taskResults,\n dynamicTasksGenerated: this.dynamicTasksGenerated,\n totalSteps: this.currentStep,\n };\n\n // Save result for getResult usage\n this.streamResult = result;\n\n yield {\n type: 'complete',\n taskName: 'workflow',\n content: 'Dynamic workflow execution completed',\n progress: 100,\n timestamp: Date.now(),\n };\n\n return result;\n } catch (error) {\n const errorResult: WorkflowResult = {\n success: false,\n error: error as Error,\n executionTime: Date.now() - this.startTime,\n taskResults: this.taskResults,\n dynamicTasksGenerated: this.dynamicTasksGenerated,\n totalSteps: this.currentStep,\n };\n\n // Save error result\n this.streamResult = errorResult;\n\n yield {\n type: 'error',\n taskName: 'workflow',\n content: error instanceof Error ? 
error.message : String(error),\n timestamp: Date.now(),\n };\n\n return errorResult;\n }\n }\n\n private async *executeTaskStreamForStrategy(\n task: StreamingDAGTask\n ): AsyncGenerator<StreamChunk, void, unknown> {\n const taskStartTime = Date.now();\n\n try {\n yield {\n type: 'progress',\n taskName: task.name,\n content: `Starting dynamic task execution: ${task.name}`,\n progress: 0,\n timestamp: Date.now(),\n };\n\n const input = this.context.getAll();\n let output: Record<string, any>;\n\n // Check if it's a streaming task\n if (task.isStreaming && task.executeStream) {\n const generator = task.executeStream(input);\n let finalResult: Record<string, any> = {};\n\n try {\n // Iterate generator and yield all intermediate results\n while (true) {\n const { value, done } = await generator.next();\n if (done) {\n finalResult = value || {};\n break;\n }\n // Yield streaming data\n yield value;\n }\n } catch (error) {\n // Error during streaming task execution\n throw error;\n }\n\n output = finalResult;\n } else {\n // Regular task execution\n output = await task.execute(input);\n }\n\n // Store results (same logic as base class)\n const taskName = task.name || '';\n this.context.set(taskName, output);\n\n for (const [key, value] of Object.entries(output)) {\n this.context.set(key, value);\n }\n\n let uniqueKey = taskName;\n let counter = 1;\n while (this.taskResults.has(uniqueKey)) {\n uniqueKey = `${taskName}_${counter}`;\n counter++;\n }\n\n const result: TaskExecutionResult = {\n taskName: taskName,\n status: 'completed',\n output: output,\n duration: Date.now() - taskStartTime,\n timestamp: Date.now(),\n };\n\n this.taskResults.set(uniqueKey, result);\n this.context.addExecutionResult(result);\n\n yield {\n type: 'complete',\n taskName: task.name,\n content: `Dynamic task completed: ${task.name}`,\n progress: 100,\n timestamp: Date.now(),\n metadata: { duration: result.duration },\n };\n } catch (error) {\n console.warn('Dynamic task execution failed:', error);\n\n const taskName = task.name || '';\n let uniqueKey = taskName;\n let counter = 1;\n while (this.taskResults.has(uniqueKey)) {\n uniqueKey = `${taskName}_${counter}`;\n counter++;\n }\n\n const result: TaskExecutionResult = {\n taskName: taskName,\n status: 'failed',\n error: error instanceof Error ? error.message : String(error),\n duration: Date.now() - taskStartTime,\n timestamp: Date.now(),\n };\n\n this.taskResults.set(uniqueKey, result);\n this.context.addExecutionResult(result);\n\n yield {\n type: 'error',\n taskName: task.name,\n content: error instanceof Error ? 
error.message : String(error),\n timestamp: Date.now(),\n };\n }\n }\n}\n\n// 🤖 AI SDK 兼容的静态流式工作流\nclass AISDKStreamingStaticWorkflow\n extends StaticWorkflow\n implements AISDKStreamingWorkflow\n{\n executeStreamAISDK(input: TaskInput = {}): AISDKStreamingWorkflowResult {\n const resultPromise = this.execute(input);\n\n // 创建组合的流\n const combinedStreams = this.createCombinedAISDKStreams(input);\n\n return {\n textStream: combinedStreams.textStream,\n fullStream: combinedStreams.fullStream,\n toDataStreamResponse: () => combinedStreams.toDataStreamResponse(),\n toReadableStream: () => combinedStreams.toReadableStream(),\n getResult: () => resultPromise,\n };\n }\n\n private createCombinedAISDKStreams(input: TaskInput) {\n const textChunks: string[] = [];\n const dataChunks: any[] = [];\n\n const self = this;\n\n // 创建异步生成器函数\n const textStreamGenerator = async function* (): AsyncGenerator<\n string,\n void,\n unknown\n > {\n try {\n Object.entries(input).forEach(([key, value]) => {\n self.context.set(key, value);\n });\n\n const levels = self.computeExecutionLevels();\n\n for (const level of levels) {\n for (const task of level) {\n const aiTask = task as AISDKStreamingTask;\n\n if (aiTask.isAISDKStreaming && aiTask.executeStreamAI) {\n const streamResult = await aiTask.executeStreamAI(\n self.context.getAll()\n );\n\n if (streamResult.textStream) {\n for await (const chunk of streamResult.textStream) {\n textChunks.push(chunk);\n yield chunk;\n }\n }\n } else {\n // 普通任务,生成状态文本\n const statusText = `[${task.name}] Task completed\\n`;\n textChunks.push(statusText);\n yield statusText;\n\n // 执行任务\n await self.executeTask(task);\n }\n }\n }\n } catch (error) {\n const errorText = `Error: ${\n error instanceof Error ? error.message : String(error)\n }\\n`;\n textChunks.push(errorText);\n yield errorText;\n }\n };\n\n const fullStreamGenerator = async function* (): AsyncGenerator<\n any,\n void,\n unknown\n > {\n try {\n Object.entries(input).forEach(([key, value]) => {\n self.context.set(key, value);\n });\n\n yield { type: 'workflow-start', data: { status: 'starting' } };\n\n const levels = self.computeExecutionLevels();\n\n for (const level of levels) {\n for (const task of level) {\n const aiTask = task as AISDKStreamingTask;\n\n yield { type: 'task-start', data: { taskName: task.name } };\n\n if (aiTask.isAISDKStreaming && aiTask.executeStreamAI) {\n const streamResult = await aiTask.executeStreamAI(\n self.context.getAll()\n );\n\n if (streamResult.fullStream) {\n for await (const chunk of streamResult.fullStream) {\n dataChunks.push(chunk);\n yield { type: 'ai-chunk', data: chunk };\n }\n }\n } else {\n // 执行普通任务\n await self.executeTask(task);\n yield {\n type: 'task-complete',\n data: {\n taskName: task.name,\n result: self.context.get(task.name),\n },\n };\n }\n }\n }\n\n yield {\n type: 'workflow-complete',\n data: {\n status: 'completed',\n finalResult: self.context.getAll(),\n },\n };\n } catch (error) {\n yield {\n type: 'workflow-error',\n data: {\n error: error instanceof Error ? 
error.message : String(error),\n },\n };\n }\n };\n\n return {\n get textStream() {\n return textStreamGenerator();\n },\n get fullStream() {\n return fullStreamGenerator();\n },\n\n toDataStreamResponse(): Response {\n const encoder = new TextEncoder();\n\n const stream = new ReadableStream({\n async start(controller) {\n try {\n for await (const chunk of fullStreamGenerator()) {\n const data = `data: ${JSON.stringify(chunk)}\\n\\n`;\n controller.enqueue(encoder.encode(data));\n }\n controller.close();\n } catch (error) {\n controller.error(error);\n }\n },\n });\n\n return new Response(stream, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n Connection: 'keep-alive',\n },\n });\n },\n\n toReadableStream(): ReadableStream {\n const encoder = new TextEncoder();\n\n return new ReadableStream({\n async start(controller) {\n try {\n for await (const textChunk of textStreamGenerator()) {\n controller.enqueue(encoder.encode(textChunk));\n }\n controller.close();\n } catch (error) {\n controller.error(error);\n }\n },\n });\n },\n };\n }\n}\n\n// 🤖 AI SDK 兼容的策略流式工作流\nclass AISDKStreamingStrategyWorkflow\n extends StrategyBasedWorkflow\n implements AISDKStreamingWorkflow\n{\n executeStreamAISDK(input: TaskInput = {}): AISDKStreamingWorkflowResult {\n const resultPromise = this.execute(input);\n\n // 创建组合的流(动态版本)\n const combinedStreams = this.createDynamicCombinedAISDKStreams(input);\n\n return {\n textStream: combinedStreams.textStream,\n fullStream: combinedStreams.fullStream,\n toDataStreamResponse: () => combinedStreams.toDataStreamResponse(),\n toReadableStream: () => combinedStreams.toReadableStream(),\n getResult: () => resultPromise,\n };\n }\n\n private createDynamicCombinedAISDKStreams(input: TaskInput) {\n const textChunks: string[] = [];\n const dataChunks: any[] = [];\n\n const self = this;\n\n const textStreamGenerator = async function* (): AsyncGenerator<\n string,\n void,\n unknown\n > {\n try {\n Object.entries(input).forEach(([key, value]) => {\n self.context.set(key, value);\n });\n\n self.currentStep = 0;\n self.dynamicTasksGenerated = 0;\n self.usedStrategies.clear();\n\n while (self.hasTasksToExecute() && self.shouldContinue()) {\n self.currentStep++;\n\n const readyTasks = self.getReadyTasks();\n\n for (const task of readyTasks) {\n const aiTask = task as AISDKStreamingTask;\n\n if (aiTask.isAISDKStreaming && aiTask.executeStreamAI) {\n const streamResult = await aiTask.executeStreamAI(\n self.context.getAll()\n );\n\n if (streamResult.textStream) {\n for await (const chunk of streamResult.textStream) {\n textChunks.push(chunk);\n yield chunk;\n }\n }\n } else {\n const statusText = `[${task.name}] Task completed\\n`;\n textChunks.push(statusText);\n yield statusText;\n\n await self.executeTask(task);\n }\n }\n\n // 评估动态策略\n await self.evaluateStrategiesAndGenerateTasks();\n }\n } catch (error) {\n const errorText = `Error: ${\n error instanceof Error ? 
error.message : String(error)\n }\\n`;\n textChunks.push(errorText);\n yield errorText;\n }\n };\n\n const fullStreamGenerator = async function* (): AsyncGenerator<\n any,\n void,\n unknown\n > {\n try {\n Object.entries(input).forEach(([key, value]) => {\n self.context.set(key, value);\n });\n\n yield { type: 'dynamic-workflow-start', data: { status: 'starting' } };\n\n self.currentStep = 0;\n self.dynamicTasksGenerated = 0;\n self.usedStrategies.clear();\n\n while (self.hasTasksToExecute() && self.shouldContinue()) {\n self.currentStep++;\n\n yield {\n type: 'dynamic-step',\n data: {\n step: self.currentStep,\n maxSteps: self.config.maxDynamicSteps || 50,\n },\n };\n\n const readyTasks = self.getReadyTasks();\n\n for (const task of readyTasks) {\n const aiTask = task as AISDKStreamingTask;\n\n yield { type: 'task-start', data: { taskName: task.name } };\n\n if (aiTask.isAISDKStreaming && aiTask.executeStreamAI) {\n const streamResult = await aiTask.executeStreamAI(\n self.context.getAll()\n );\n\n if (streamResult.fullStream) {\n for await (const chunk of streamResult.fullStream) {\n dataChunks.push(chunk);\n yield { type: 'ai-chunk', data: chunk };\n }\n }\n } else {\n await self.executeTask(task);\n yield {\n type: 'task-complete',\n data: {\n taskName: task.name,\n result: self.context.get(task.name),\n },\n };\n }\n }\n\n // 评估动态策略\n const beforeCount = self.tasks.length;\n await self.evaluateStrategiesAndGenerateTasks();\n const afterCount = self.tasks.length;\n\n if (afterCount > beforeCount) {\n yield {\n type: 'dynamic-tasks-generated',\n data: {\n newTasks: afterCount - beforeCount,\n totalGenerated: self.dynamicTasksGenerated,\n },\n };\n }\n }\n\n yield {\n type: 'dynamic-workflow-complete',\n data: {\n status: 'completed',\n totalSteps: self.currentStep,\n dynamicTasksGenerated: self.dynamicTasksGenerated,\n finalResult: self.context.getAll(),\n },\n };\n } catch (error) {\n yield {\n type: 'dynamic-workflow-error',\n data: {\n error: error instanceof Error ? 
error.message : String(error),\n },\n };\n }\n };\n\n return {\n get textStream() {\n return textStreamGenerator();\n },\n get fullStream() {\n return fullStreamGenerator();\n },\n\n toDataStreamResponse(): Response {\n const encoder = new TextEncoder();\n\n const stream = new ReadableStream({\n async start(controller) {\n try {\n for await (const chunk of fullStreamGenerator()) {\n const data = `data: ${JSON.stringify(chunk)}\\n\\n`;\n controller.enqueue(encoder.encode(data));\n }\n controller.close();\n } catch (error) {\n controller.error(error);\n }\n },\n });\n\n return new Response(stream, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n Connection: 'keep-alive',\n },\n });\n },\n\n toReadableStream(): ReadableStream {\n const encoder = new TextEncoder();\n\n return new ReadableStream({\n async start(controller) {\n try {\n for await (const textChunk of textStreamGenerator()) {\n controller.enqueue(encoder.encode(textChunk));\n }\n controller.close();\n } catch (error) {\n controller.error(error);\n }\n },\n });\n },\n };\n }\n}\n","import type { DAGTask } from \"./WorkflowBuilder\";\n\nexport interface TaskDefinition {\n name: string;\n description: string;\n capabilities: string[];\n createTask: () => DAGTask;\n}\n\nexport class TaskRegistry {\n private static instance: TaskRegistry;\n private tasks: Map<string, TaskDefinition> = new Map();\n\n private constructor() {}\n\n static getInstance(): TaskRegistry {\n if (!TaskRegistry.instance) {\n TaskRegistry.instance = new TaskRegistry();\n }\n return TaskRegistry.instance;\n }\n\n registerTask(definition: TaskDefinition): void {\n this.tasks.set(definition.name, definition);\n }\n\n getTask(name: string): TaskDefinition | undefined {\n return this.tasks.get(name);\n }\n\n getAllTasks(): TaskDefinition[] {\n return Array.from(this.tasks.values());\n }\n\n getTasksByCapability(capability: string): TaskDefinition[] {\n return this.getAllTasks().filter((task) =>\n task.capabilities.includes(capability),\n );\n }\n\n clear(): void {\n this.tasks.clear();\n 
}\n}\n"],"names":["self"]}
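
For reference, the dynamic-strategy and streaming entry points defined above (addDynamicStrategy, buildStreaming, executeStream) might be driven as in this sketch; AnalyzeTask, ReviewTask and the needsReview key are illustrative, and the same export assumptions apply:

import { WorkflowBuilder, DAGTask, type TaskInput } from 'agent-workflow';

class AnalyzeTask extends DAGTask {
  name = 'analyze';
  async execute(_input: TaskInput) {
    // Whatever this returns is merged into the workflow context by the engine.
    return { needsReview: true };
  }
}

class ReviewTask extends DAGTask {
  name = 'review';
  async execute(_input: TaskInput) {
    return { reviewed: true };
  }
}

const streaming = WorkflowBuilder.create()
  .addTask(new AnalyzeTask())
  // DynamicStrategy: once the context satisfies the condition, generate follow-up tasks.
  .addDynamicStrategy({
    name: 'review-when-flagged',
    condition: (ctx) => ctx.get<boolean>('needsReview') === true,
    generator: async () => [new ReviewTask()],
    once: true,
  })
  .withConfig({ maxDynamicSteps: 10 })
  .buildStreaming();

const { stream, getResult } = streaming.executeStream({ requestId: 'demo' });

// Each StreamChunk carries a type ('progress' | 'data' | 'error' | 'complete'),
// the task name, optional content/progress, and a timestamp.
for await (const chunk of stream) {
  console.log(`[${chunk.type}] ${chunk.taskName}`, chunk.content ?? '');
}

const result = await getResult();
console.log(result.dynamicTasksGenerated, result.totalSteps);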