codecrucible-synth
Version:
Production-Ready AI Development Platform with Multi-Voice Synthesis, Smithery MCP Integration, Enterprise Security, and Zero-Timeout Reliability
1,281 lines • 61.4 kB
JavaScript
/**
* Advanced Workflow Orchestrator implementing 9 industry-standard agentic patterns
* Based on research from Amazon Q CLI, ZCF, and emerging AI agent architectures
*/
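/*
* Usage sketch (illustrative only, not part of the module): a minimal example of how this
* orchestrator might be driven. The `modelClient` is an assumption — any object exposing
* the synthesize() and streamRequest() methods used by the handlers below. The prompt and
* voice values are illustrative.
*
*   const orchestrator = new WorkflowOrchestrator(modelClient);
*   await orchestrator.initialize();
*   const response = await orchestrator.executePattern(
*     WorkflowPatternType.SEQUENTIAL,
*     { prompt: 'Summarize the repository structure', voice: 'analyst' },
*     { steps: [{ id: 'summary', prompt: 'Summarize the repository structure' }] }
*   );
*   if (response.success) console.log(response.result.content);
*/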
import { EventEmitter } from 'events';
import { Logger } from '../logger.js';
import { getErrorMessage } from '../../utils/error-utils.js';
// Utility Functions
/**
* Convert ModelResponse to JsonValue for workflow compatibility
*/
function modelResponseToJsonValue(response) {
if (!response || typeof response !== 'object') {
return response;
}
// Convert ModelResponse to plain JsonObject
const jsonObject = {
content: response.content || '',
model: response.model || '',
provider: response.provider || '',
metadata: response.metadata || {},
tokens_used: response.tokens_used || 0,
usage: response.usage || {},
cached: response.cached || false,
streamed: response.streamed || false,
processingTime: response.processingTime || 0,
};
return jsonObject;
}
// Workflow Pattern Types
export var WorkflowPatternType;
(function (WorkflowPatternType) {
WorkflowPatternType["SEQUENTIAL"] = "sequential";
WorkflowPatternType["PARALLEL"] = "parallel";
WorkflowPatternType["HIERARCHICAL"] = "hierarchical";
WorkflowPatternType["ADAPTIVE"] = "adaptive";
WorkflowPatternType["FEEDBACK"] = "feedback";
WorkflowPatternType["ITERATIVE"] = "iterative";
WorkflowPatternType["BRANCHING"] = "branching";
WorkflowPatternType["STREAMING"] = "streaming";
WorkflowPatternType["MEMORY"] = "memory";
})(WorkflowPatternType || (WorkflowPatternType = {}));
// Main Orchestrator Class
export class WorkflowOrchestrator extends EventEmitter {
modelClient;
logger;
executionHistory;
activeWorkflows;
patternHandlers;
cleanupInterval = null;
isDisposed = false;
MAX_HISTORY_SIZE = 1000;
MAX_ACTIVE_WORKFLOWS = 100;
constructor(modelClient) {
super();
this.modelClient = modelClient;
this.logger = new Logger('WorkflowOrchestrator');
this.executionHistory = new Map();
this.activeWorkflows = new Map();
this.patternHandlers = this.initializeHandlers();
// Set up periodic cleanup to prevent memory leaks
this.setupCleanupInterval();
}
/**
* Initialize the workflow orchestrator
*/
async initialize() {
this.logger.info('Initializing Workflow Orchestrator');
// Initialize any async resources if needed
// Currently, all initialization is done in constructor
return Promise.resolve();
}
setupCleanupInterval() {
// Run cleanup every 10 minutes
this.cleanupInterval = setInterval(() => {
if (this.isDisposed) {
if (this.cleanupInterval) {
clearInterval(this.cleanupInterval);
this.cleanupInterval = null;
}
return;
}
this.performCleanup();
}, 10 * 60 * 1000);
// Ensure cleanup interval doesn't prevent process exit
if (this.cleanupInterval?.unref) {
this.cleanupInterval.unref();
}
}
performCleanup() {
// Basic cleanup - can be expanded
try {
const now = Date.now();
// Clean up workflows that have been active for more than 1 hour
for (const [key, execution] of this.activeWorkflows) {
if (now - execution.startTime > 3600000) {
this.activeWorkflows.delete(key);
}
}
// Trim execution history so it cannot grow without bound
if (this.executionHistory.size > this.MAX_HISTORY_SIZE) {
const excess = this.executionHistory.size - this.MAX_HISTORY_SIZE;
for (const key of Array.from(this.executionHistory.keys()).slice(0, excess)) {
this.executionHistory.delete(key);
}
}
this.logger.info('🧹 Performed periodic cleanup');
}
catch (error) {
this.logger.error('Failed to perform cleanup:', error);
}
}
initializeHandlers() {
const handlers = new Map();
handlers.set(WorkflowPatternType.SEQUENTIAL, new SequentialHandler(this.modelClient));
handlers.set(WorkflowPatternType.PARALLEL, new ParallelHandler(this.modelClient));
handlers.set(WorkflowPatternType.HIERARCHICAL, new HierarchicalHandler(this.modelClient));
handlers.set(WorkflowPatternType.ADAPTIVE, new AdaptiveHandler(this.modelClient));
handlers.set(WorkflowPatternType.FEEDBACK, new FeedbackHandler(this.modelClient));
handlers.set(WorkflowPatternType.ITERATIVE, new IterativeHandler(this.modelClient));
handlers.set(WorkflowPatternType.BRANCHING, new BranchingHandler(this.modelClient));
handlers.set(WorkflowPatternType.STREAMING, new StreamingHandler(this.modelClient));
handlers.set(WorkflowPatternType.MEMORY, new MemoryHandler(this.modelClient));
return handlers;
}
async executePattern(pattern, request, config) {
const executionId = this.generateExecutionId();
const startTime = Date.now();
this.logger.info(`Starting ${pattern} workflow execution`, { executionId });
this.emit('workflow:start', { executionId, pattern, request });
try {
const handler = this.patternHandlers.get(pattern);
if (!handler) {
throw new Error(`No handler found for pattern: ${pattern}`);
}
const workflow = new WorkflowExecution(executionId, pattern, request, config);
this.activeWorkflows.set(executionId, workflow);
const result = await handler.execute(request, config, progress => {
this.emit('workflow:progress', { executionId, progress });
workflow.updateProgress(progress);
});
const executionTime = Date.now() - startTime;
const response = {
success: true,
result,
executionTime,
metadata: {
pattern,
executionId,
...result.metadata,
},
};
this.recordExecution(executionId, result);
this.emit('workflow:complete', { executionId, response });
return response;
}
catch (error) {
this.logger.error(`Workflow execution failed`, { executionId, error });
this.emit('workflow:error', { executionId, error });
return {
success: false,
error: getErrorMessage(error),
executionTime: Date.now() - startTime,
metadata: { pattern, executionId },
};
}
finally {
this.activeWorkflows.delete(executionId);
}
}
async executeSequentialChain(request) {
return this.executePattern(WorkflowPatternType.SEQUENTIAL, request);
}
async executeParallelAgents(request) {
return this.executePattern(WorkflowPatternType.PARALLEL, request);
}
async executeAdaptiveWorkflow(request) {
return this.executePattern(WorkflowPatternType.ADAPTIVE, request);
}
async executeHierarchicalOrchestration(request) {
return this.executePattern(WorkflowPatternType.HIERARCHICAL, request);
}
async executeWithFeedback(request) {
return this.executePattern(WorkflowPatternType.FEEDBACK, request);
}
async executeIterativeRefinement(request) {
return this.executePattern(WorkflowPatternType.ITERATIVE, request);
}
async executeBranchingLogic(request) {
return this.executePattern(WorkflowPatternType.BRANCHING, request);
}
async executeStreamingWorkflow(request) {
return this.executePattern(WorkflowPatternType.STREAMING, request);
}
async executeWithMemory(request) {
return this.executePattern(WorkflowPatternType.MEMORY, request);
}
// Utility methods
generateExecutionId() {
return `exec_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
}
recordExecution(executionId, result) {
const history = this.executionHistory.get(executionId) || [];
history.push(result);
this.executionHistory.set(executionId, history);
}
getExecutionHistory(executionId) {
return this.executionHistory.get(executionId);
}
getActiveWorkflows() {
return Array.from(this.activeWorkflows.values());
}
cancelWorkflow(executionId) {
const workflow = this.activeWorkflows.get(executionId);
if (workflow) {
workflow.cancel();
this.activeWorkflows.delete(executionId);
this.emit('workflow:cancelled', { executionId });
return true;
}
return false;
}
}
// Workflow Execution Class
class WorkflowExecution {
id;
pattern;
request;
config;
startTime;
status;
progress;
constructor(id, pattern, request, config, startTime = Date.now(), status = 'running', progress = 0) {
this.id = id;
this.pattern = pattern;
this.request = request;
this.config = config;
this.startTime = startTime;
this.status = status;
this.progress = progress;
}
updateProgress(progress) {
this.progress = Math.min(100, Math.max(0, progress));
}
cancel() {
this.status = 'cancelled';
}
}
// Abstract Pattern Handler
class PatternHandler {
modelClient;
constructor(modelClient) {
this.modelClient = modelClient;
}
}
// Sequential Handler Implementation
class SequentialHandler extends PatternHandler {
async execute(request, config, onProgress) {
const results = [];
const context = config?.context || new Map();
const steps = config?.steps || [];
for (let i = 0; i < steps.length; i++) {
const step = steps[i];
onProgress((i / steps.length) * 100);
// Check dependencies
if (step.dependencies) {
for (const dep of step.dependencies) {
if (!context.has(dep)) {
throw new Error(`Missing dependency: ${dep}`);
}
}
}
// Execute step
const stepRequest = {
...request,
prompt: this.interpolatePrompt(step.prompt, context),
voice: step.voice || request.voice,
};
const result = await this.modelClient.synthesize(stepRequest);
// Transform and store result - convert to JsonValue for compatibility
const jsonResult = modelResponseToJsonValue(result);
const transformed = step.transform ? step.transform(jsonResult) : jsonResult;
context.set(step.id, transformed);
results.push(transformed);
}
onProgress(100);
return {
success: true,
content: results,
metadata: {
stepsCompleted: steps.length,
context: Object.fromEntries(context),
},
};
}
interpolatePrompt(prompt, context) {
let interpolated = prompt;
context.forEach((value, key) => {
interpolated = interpolated.split(`{{${key}}}`).join(JSON.stringify(value));
});
return interpolated;
}
}
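/*
* Sequential config sketch (assumed shape, inferred from SequentialHandler above): each
* step can declare `dependencies` on earlier step ids and reference their stored results
* in its prompt via `{{stepId}}` placeholders, which interpolatePrompt substitutes with
* the JSON-stringified value. Step ids and prompts here are illustrative.
*
*   const sequentialConfig = {
*     steps: [
*       { id: 'outline', prompt: 'Outline an article about workflow engines' },
*       {
*         id: 'draft',
*         prompt: 'Write a draft based on this outline: {{outline}}',
*         dependencies: ['outline'],
*         transform: result => result.content,
*       },
*     ],
*   };
*/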
// Parallel Handler Implementation
class ParallelHandler extends PatternHandler {
async execute(request, config, onProgress) {
const tasks = config?.tasks || [];
const concurrency = config?.concurrency || 5;
const aggregation = config?.aggregation || 'merge';
const results = await this.executeWithConcurrency(tasks, concurrency, completed => onProgress((completed / tasks.length) * 100));
const aggregated = this.aggregateResults(results, aggregation);
return {
success: true,
content: aggregated,
metadata: {
tasksCompleted: tasks.length,
aggregationMethod: aggregation,
},
};
}
async executeWithConcurrency(tasks, concurrency, onTaskComplete) {
const results = new Array(tasks.length);
let completed = 0;
let index = 0;
const executeTask = async (taskIndex) => {
const task = tasks[taskIndex];
try {
const result = await this.modelClient.synthesize(task.request);
results[taskIndex] = result;
}
catch (error) {
results[taskIndex] = { error: getErrorMessage(error) };
}
completed++;
onTaskComplete(completed);
};
// Maintain a pool of in-flight tasks; each task removes itself from the pool when it settles,
// so Promise.race always waits on promises that are actually pending
const inFlight = new Set();
while (index < tasks.length) {
while (inFlight.size < concurrency && index < tasks.length) {
const promise = executeTask(index).finally(() => inFlight.delete(promise));
inFlight.add(promise);
index++;
}
if (inFlight.size >= concurrency) {
await Promise.race(inFlight);
}
}
await Promise.all(inFlight);
return results;
}
aggregateResults(results, method) {
switch (method) {
case 'merge':
return results.flat();
case 'reduce':
return results.reduce((acc, curr) => ({ ...acc, ...curr }), {});
case 'select':
return results.find(r => r && !r.error) || results[0];
default:
return results;
}
}
}
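/*
* Parallel config sketch (assumed shape, inferred from ParallelHandler above): each task
* carries its own model request, `concurrency` caps in-flight calls, and `aggregation`
* chooses how per-task results are combined ('merge' flattens, 'reduce' shallow-merges
* objects, 'select' picks the first result without an error). Prompts are illustrative.
*
*   const parallelConfig = {
*     concurrency: 3,
*     aggregation: 'select',
*     tasks: [
*       { request: { prompt: 'Review this module for security issues' } },
*       { request: { prompt: 'Review this module for performance issues' } },
*       { request: { prompt: 'Review this module for readability issues' } },
*     ],
*   };
*/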
// Full Hierarchical Handler Implementation
class HierarchicalHandler extends PatternHandler {
async execute(request, config, onProgress) {
const agents = config?.agents || [];
const coordinator = config?.coordinator || 'coordinator';
const protocol = config?.communicationProtocol || 'broadcast';
// Create agent hierarchy
const agentMap = new Map();
agents.forEach(agent => agentMap.set(agent.id, agent));
// Sort agents by priority (copy first so the caller's config is not mutated)
const sortedAgents = [...agents].sort((a, b) => b.priority - a.priority);
// Initialize communication channel
const messageQueue = new MessageQueue(protocol);
const results = new Map();
// Coordinator processes request first
onProgress(10);
const coordinatorRequest = {
...request,
prompt: `As coordinator, analyze and delegate: ${request.prompt}`,
metadata: { role: 'coordinator' },
};
const coordinatorResponse = await this.modelClient.synthesize(coordinatorRequest);
const delegationPlan = this.parseDelegationPlan(coordinatorResponse);
// Execute delegated tasks
let progress = 20;
for (const delegation of delegationPlan) {
const agent = agentMap.get(delegation.agentId);
if (!agent)
continue;
onProgress(progress);
// Check if agent has required capabilities
if (!this.hasCapabilities(agent, delegation.requiredCapabilities)) {
results.set(agent.id, { error: 'Missing capabilities' });
continue;
}
// Execute agent task
const agentRequest = {
...request,
prompt: delegation.task,
voice: agent.type,
metadata: {
role: agent.type,
capabilities: agent.capabilities,
},
};
try {
const agentResult = await this.modelClient.synthesize(agentRequest);
results.set(agent.id, agentResult);
// Broadcast results if needed
if (protocol === 'broadcast') {
messageQueue.broadcast(agent.id, agentResult);
}
}
catch (error) {
results.set(agent.id, { error: getErrorMessage(error) });
}
progress += 60 / delegationPlan.length;
}
// Coordinator aggregates results
onProgress(90);
const aggregationRequest = {
...request,
prompt: `Aggregate these agent results: ${JSON.stringify(Object.fromEntries(results))}`,
metadata: { role: 'coordinator', phase: 'aggregation' },
};
const finalResult = await this.modelClient.synthesize(aggregationRequest);
onProgress(100);
return {
success: true,
content: finalResult,
metadata: {
pattern: 'hierarchical',
agentsUsed: results.size,
protocol,
results: Object.fromEntries(results),
},
};
}
parseDelegationPlan(response) {
// Parse delegation plan from coordinator response
// In production, use structured output or JSON mode
const plan = [];
const content = typeof response === 'string' ? response : String(response.content || '');
// Simple parsing - in production use proper NLP or structured output
const lines = content.split('\n');
for (const line of lines) {
if (line.includes('->')) {
const [agentPart, taskPart] = line.split('->');
const agentId = agentPart.trim().toLowerCase();
const task = taskPart.trim();
plan.push({
agentId,
task,
requiredCapabilities: [],
});
}
}
return plan.length > 0
? plan
: [{ agentId: 'default', task: 'Process request', requiredCapabilities: [] }];
}
hasCapabilities(agent, required) {
return required.every(cap => agent.capabilities.includes(cap));
}
}
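/*
* Hierarchical config sketch (assumed shape, inferred from HierarchicalHandler above).
* Agent ids, types, and capabilities are illustrative. Note that parseDelegationPlan
* expects the coordinator's response to contain lines of the form "agentId -> task";
* anything else falls back to a single default delegation.
*
*   const hierarchicalConfig = {
*     coordinator: 'coordinator',
*     communicationProtocol: 'broadcast',
*     agents: [
*       { id: 'researcher', type: 'explorer', priority: 2, capabilities: ['search'] },
*       { id: 'writer', type: 'maintainer', priority: 1, capabilities: ['draft'] },
*     ],
*   };
*/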
// Message Queue for inter-agent communication
class MessageQueue {
protocol;
messages = new Map();
constructor(protocol) {
this.protocol = protocol;
}
broadcast(from, message) {
if (this.protocol === 'broadcast') {
// Store message for all agents
this.messages.set('broadcast', [
...(this.messages.get('broadcast') || []),
{ from, message },
]);
}
}
send(from, to, message) {
if (this.protocol === 'direct') {
const key = `${from}->${to}`;
this.messages.set(key, [...(this.messages.get(key) || []), message]);
}
}
getMessages(agentId) {
if (this.protocol === 'broadcast') {
return this.messages.get('broadcast') || [];
}
return [];
}
}
class AdaptiveHandler extends PatternHandler {
executionHistory = [];
currentWorkflow = [];
async execute(request, config, onProgress) {
const initialPlan = config?.initialPlan || [];
const adaptationRules = config?.adaptationRules || [];
const learningRate = config?.learningRate || 0.1;
this.currentWorkflow = [...initialPlan];
let currentNodeId = initialPlan[0]?.id || 'start';
const context = {
nodeId: currentNodeId,
request,
history: [],
metrics: {
successRate: 1.0,
avgLatency: 0,
adaptations: 0,
},
};
let progress = 0;
const results = [];
while (currentNodeId && currentNodeId !== 'end') {
const node = this.currentWorkflow.find(n => n.id === currentNodeId);
if (!node)
break;
onProgress(progress);
// Execute current node
const nodeResult = await this.executeNode(node, request, context);
results.push(nodeResult);
context.history.push(nodeResult);
// Check adaptation rules
for (const rule of adaptationRules) {
if (rule.trigger(context, this.executionHistory)) {
// Apply adaptation
rule.action({ initialPlan: this.currentWorkflow, adaptationRules, learningRate });
context.metrics.adaptations++;
// Re-evaluate current position
const adaptedNode = this.findBestNode(context, this.currentWorkflow);
if (adaptedNode) {
currentNodeId = adaptedNode.id;
context.nodeId = currentNodeId;
continue;
}
}
}
// Determine next node based on transitions
const nextNode = this.selectNextNode(node, context);
currentNodeId = nextNode?.id || 'end';
context.nodeId = currentNodeId;
// Update metrics
this.updateMetrics(context, nodeResult);
// Apply learning
this.applyLearning(node, nodeResult, learningRate);
progress = Math.min(90, progress + 90 / initialPlan.length);
}
onProgress(100);
return {
success: true,
content: results,
metadata: {
pattern: 'adaptive',
adaptations: context.metrics.adaptations,
nodesExecuted: results.length,
finalMetrics: context.metrics,
},
};
}
async executeNode(node, request, context) {
// Recursively execute workflow patterns
const orchestrator = new WorkflowOrchestrator(this.modelClient);
const nodeRequest = {
...request,
prompt: `${request.prompt} [Node: ${node.id}]`,
};
const result = await orchestrator.executePattern(node.type, nodeRequest, node.config);
return {
nodeId: node.id,
type: node.type,
result,
timestamp: Date.now(),
};
}
selectNextNode(currentNode, context) {
// Evaluate transitions
for (const transition of currentNode.transitions) {
if (transition.condition(context)) {
const nextNode = this.currentWorkflow.find(n => n.id === transition.to);
if (nextNode)
return nextNode;
}
}
// Probabilistic selection if no conditions met
const validTransitions = currentNode.transitions.filter(t => t.probability);
if (validTransitions.length > 0) {
const random = Math.random();
let cumulative = 0;
for (const transition of validTransitions) {
cumulative += transition.probability || 0;
if (random <= cumulative) {
return this.currentWorkflow.find(n => n.id === transition.to) || null;
}
}
}
return null;
}
findBestNode(context, workflow) {
// Find best node based on current context
let bestNode = null;
let bestScore = -Infinity;
for (const node of workflow) {
const score = this.scoreNode(node, context);
if (score > bestScore) {
bestScore = score;
bestNode = node;
}
}
return bestNode;
}
scoreNode(node, context) {
// Score node based on context and history
let score = 0;
// Prefer unexplored nodes
const timesExecuted = context.history.filter((h) => h.nodeId === node.id).length;
score -= timesExecuted * 0.5;
// Consider success rate of similar nodes
const similarResults = this.executionHistory.filter(h => h.type === node.type);
if (similarResults.length > 0) {
const successRate = similarResults.filter(r => r.success).length / similarResults.length;
score += successRate * 2;
}
return score;
}
updateMetrics(context, result) {
const latency = result.result?.executionTime || 0;
const n = context.history.length;
context.metrics.avgLatency = (context.metrics.avgLatency * (n - 1) + latency) / n;
if (result.result?.success) {
context.metrics.successRate = (context.metrics.successRate * (n - 1) + 1) / n;
}
else {
context.metrics.successRate = (context.metrics.successRate * (n - 1)) / n;
}
}
applyLearning(node, result, learningRate) {
// Update transition probabilities based on success
if (result.result?.success) {
for (const transition of node.transitions) {
if (transition.probability) {
transition.probability = Math.min(1, transition.probability * (1 + learningRate));
}
}
}
// Store in history for future adaptations
this.executionHistory.push({
nodeId: node.id,
type: node.type,
success: result.result?.success,
timestamp: Date.now(),
});
}
}
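/*
* Adaptive config sketch (assumed shape, inferred from AdaptiveHandler above): the plan is
* a graph of nodes whose `type` is itself a WorkflowPatternType, with transitions chosen
* by condition (or probability) and adaptation rules that may rewrite the plan mid-run.
* Node ids, thresholds, and prompts are illustrative.
*
*   const adaptiveConfig = {
*     learningRate: 0.1,
*     initialPlan: [
*       {
*         id: 'start',
*         type: WorkflowPatternType.SEQUENTIAL,
*         config: { steps: [{ id: 'analyze', prompt: 'Analyze the task' }] },
*         transitions: [{ to: 'end', condition: ctx => ctx.metrics.successRate > 0.5 }],
*       },
*     ],
*     adaptationRules: [
*       {
*         trigger: (ctx, history) => ctx.metrics.avgLatency > 10000,
*         action: () => {}, // no-op here; a real rule could rewrite the plan
*       },
*     ],
*   };
*/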
class FeedbackHandler extends PatternHandler {
feedbackStore = new Map();
async execute(request, config, onProgress) {
const checkpoints = config?.checkpoints || [];
const escalationPolicy = config?.escalationPolicy || {
maxRetries: 3,
escalationPath: ['user', 'supervisor', 'admin'],
fallbackBehavior: 'default',
};
const feedbackConfig = config?.feedbackLoop || {
collectMetrics: true,
learningEnabled: true,
persistFeedback: true,
};
let result = await this.modelClient.synthesize(request);
let jsonResult = modelResponseToJsonValue(result);
let finalResult = result;
const checkpointsPassed = [];
const feedbackCollected = [];
let retryCount = 0;
onProgress(20);
// Check each checkpoint
for (let i = 0; i < checkpoints.length; i++) {
const checkpoint = checkpoints[i];
onProgress(20 + (i / checkpoints.length) * 60);
if (checkpoint.condition(jsonResult)) {
// Checkpoint triggered - request human feedback
const feedback = await this.requestHumanFeedback(checkpoint, result, escalationPolicy, retryCount);
if (feedback.action === 'approve') {
checkpointsPassed.push(checkpoint.id);
continue;
}
else if (feedback.action === 'reject') {
// Retry with feedback
if (retryCount < escalationPolicy.maxRetries) {
const improvedRequest = {
...request,
prompt: `${request.prompt}\n\nFeedback: ${feedback.message}\nPlease improve based on this feedback.`,
};
result = await this.modelClient.synthesize(improvedRequest);
jsonResult = modelResponseToJsonValue(result);
retryCount++;
i--; // Re-check same checkpoint
}
else {
// Max retries reached - escalate or fallback
result = await this.handleEscalation(request, result, escalationPolicy, feedback);
jsonResult = modelResponseToJsonValue(result);
}
}
else if (feedback.action === 'modify') {
// Direct modification - keep the JSON view in sync for later checkpoints
result = {
...result,
content: feedback.modifiedContent || result.content,
};
jsonResult = modelResponseToJsonValue(result);
}
feedbackCollected.push(feedback);
// Learn from feedback if enabled
if (feedbackConfig.learningEnabled) {
await this.learnFromFeedback(checkpoint, feedback, result);
}
}
}
// Collect final metrics
if (feedbackConfig.collectMetrics) {
const metrics = this.collectMetrics(feedbackCollected, checkpointsPassed);
if (feedbackConfig.persistFeedback) {
this.persistFeedback(request, feedbackCollected, metrics);
}
finalResult = {
...result,
metadata: {
tokens: result.metadata?.tokens || 0,
latency: result.metadata?.latency || 0,
quality: result.metadata?.quality,
feedbackMetrics: metrics,
},
};
}
onProgress(100);
return {
success: true,
content: finalResult,
metadata: {
pattern: 'feedback',
checkpointsPassed: checkpointsPassed.length,
feedbackCollected: feedbackCollected.length,
retries: retryCount,
metrics: feedbackConfig.collectMetrics
? this.collectMetrics(feedbackCollected, checkpointsPassed)
: undefined,
},
};
}
async requestHumanFeedback(checkpoint, result, policy, retryCount) {
// In production, this would integrate with a UI or messaging system
// For now, simulate feedback based on quality
const quality = result.metadata?.quality || Math.random();
// Simulate timeout
await new Promise(resolve => setTimeout(resolve, Math.min(checkpoint.timeout, 100)));
if (quality > 0.8) {
return {
action: 'approve',
message: 'Quality threshold met',
timestamp: Date.now(),
};
}
else if (quality > 0.5) {
return {
action: 'modify',
message: 'Minor improvements needed',
modifiedContent: `${result.content} [Improved]`,
timestamp: Date.now(),
};
}
else {
return {
action: 'reject',
message: 'Significant improvements required',
suggestions: ['Add more detail', 'Improve clarity', 'Check accuracy'],
timestamp: Date.now(),
};
}
}
async handleEscalation(request, result, policy, feedback) {
// Escalate through the chain
for (const escalationLevel of policy.escalationPath) {
const escalatedFeedback = await this.requestEscalatedFeedback(escalationLevel, request, result, feedback);
if (escalatedFeedback.resolution) {
return escalatedFeedback.result;
}
}
// Apply fallback behavior
switch (policy.fallbackBehavior) {
case 'skip':
return result;
case 'default':
return { ...result, content: 'Default response due to escalation failure' };
case 'abort':
throw new Error('Escalation failed - aborting');
default:
return result;
}
}
async requestEscalatedFeedback(level, request, result, feedback) {
// Simulate escalated feedback
return {
resolution: Math.random() > 0.5,
result: {
...result,
content: `${result.content} [Resolved by ${level}]`,
},
};
}
async learnFromFeedback(checkpoint, feedback, result) {
// Store feedback for learning
const key = `${checkpoint.id}_feedback`;
const history = this.feedbackStore.get(key) || [];
history.push({
feedback,
result,
timestamp: Date.now(),
});
this.feedbackStore.set(key, history);
// Adjust checkpoint conditions based on feedback patterns
// This would be more sophisticated in production
}
collectMetrics(feedbackCollected, checkpointsPassed) {
// Guard against division by zero when no feedback was collected
const total = feedbackCollected.length || 1;
return {
totalFeedback: feedbackCollected.length,
approvalRate: feedbackCollected.filter(f => f.action === 'approve').length / total,
modificationRate: feedbackCollected.filter(f => f.action === 'modify').length / total,
rejectionRate: feedbackCollected.filter(f => f.action === 'reject').length / total,
checkpointsPassedRate: checkpointsPassed.length / ((checkpointsPassed.length + feedbackCollected.length) || 1),
avgResponseTime: feedbackCollected.reduce((sum, f) => sum + (f.timestamp || 0), 0) / total,
};
}
persistFeedback(request, feedback, metrics) {
// Store feedback for future analysis
const sessionId = `session_${Date.now()}`;
this.feedbackStore.set(sessionId, {
request,
feedback,
metrics,
timestamp: Date.now(),
});
}
}
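/*
* Feedback config sketch (assumed shape, inferred from FeedbackHandler above): each
* checkpoint's `condition` receives the JSON-converted model response and, when it
* returns true, triggers the (currently simulated) human review with retry and
* escalation. Checkpoint ids, thresholds, and escalation paths are illustrative.
*
*   const feedbackConfig = {
*     checkpoints: [
*       { id: 'short-answer', timeout: 5000, condition: r => (r.content || '').length < 200 },
*     ],
*     escalationPolicy: {
*       maxRetries: 2,
*       escalationPath: ['user', 'supervisor'],
*       fallbackBehavior: 'default',
*     },
*     feedbackLoop: { collectMetrics: true, learningEnabled: true, persistFeedback: false },
*   };
*/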
class IterativeHandler extends PatternHandler {
async execute(request, config, onProgress) {
const maxIterations = config?.maxIterations || 3;
const qualityThreshold = config?.qualityThreshold || 0.85;
const strategy = config?.refinementStrategy || 'improve';
const criteria = config?.evaluationCriteria || [
{ name: 'accuracy', weight: 0.4, evaluate: r => r.metadata?.accuracy || 0.7 },
{ name: 'completeness', weight: 0.3, evaluate: r => r.metadata?.completeness || 0.7 },
{ name: 'clarity', weight: 0.3, evaluate: r => r.metadata?.clarity || 0.7 },
];
let currentResult = await this.modelClient.synthesize(request);
let bestResult = currentResult;
let bestQuality = this.evaluateQuality(currentResult, criteria);
const iterations = [];
onProgress(20);
for (let i = 0; i < maxIterations; i++) {
const currentQuality = this.evaluateQuality(currentResult, criteria);
iterations.push({
iteration: i + 1,
quality: currentQuality,
result: currentResult,
});
// Check if quality threshold is met
if (currentQuality >= qualityThreshold) {
bestResult = currentResult;
bestQuality = currentQuality;
break;
}
onProgress(20 + (i / maxIterations) * 60);
// Apply refinement strategy
const refinementRequest = this.createRefinementRequest(request, currentResult, strategy, criteria, currentQuality);
const refinedResult = await this.modelClient.synthesize(refinementRequest);
const refinedQuality = this.evaluateQuality(refinedResult, criteria);
// Keep best result
if (refinedQuality > bestQuality) {
bestResult = refinedResult;
bestQuality = refinedQuality;
}
// Decide whether to continue from refined or try alternative approach
if (refinedQuality > currentQuality || strategy === 'regenerate') {
currentResult = refinedResult;
}
else if (strategy === 'critique') {
// Get critique and incorporate
const critiqueRequest = {
...request,
prompt: `Critique this response and suggest improvements: ${currentResult.content}`,
};
const critique = await this.modelClient.synthesize(critiqueRequest);
currentResult = {
...currentResult,
content: `${currentResult.content}\n\nCritique: ${critique.content}`,
};
}
}
// Final evaluation
const finalEvaluation = this.performFinalEvaluation(bestResult, criteria, iterations);
onProgress(100);
return {
success: bestQuality >= qualityThreshold,
content: bestResult,
metadata: {
pattern: 'iterative',
iterations: iterations.length,
finalQuality: bestQuality,
qualityThreshold,
strategy,
evaluation: finalEvaluation,
improvements: this.calculateImprovements(iterations),
},
};
}
evaluateQuality(result, criteria) {
let totalScore = 0;
let totalWeight = 0;
for (const criterion of criteria) {
const score = criterion.evaluate(result);
totalScore += score * criterion.weight;
totalWeight += criterion.weight;
}
return totalWeight > 0 ? totalScore / totalWeight : 0;
}
createRefinementRequest(originalRequest, currentResult, strategy, criteria, currentQuality) {
const weakestCriterion = this.findWeakestCriterion(currentResult, criteria);
let refinementPrompt = originalRequest.prompt;
switch (strategy) {
case 'improve':
refinementPrompt = `Improve this response, especially regarding ${weakestCriterion.name}:\n\nOriginal: ${currentResult.content}\n\nFocus on: ${weakestCriterion.name}`;
break;
case 'critique':
refinementPrompt = `Critically analyze and rewrite:\n\n${currentResult.content}\n\nCurrent quality: ${currentQuality}. Must exceed: ${currentQuality + 0.1}`;
break;
case 'regenerate':
refinementPrompt = `${originalRequest.prompt}\n\nPrevious attempt (quality: ${currentQuality}) needs improvement in ${weakestCriterion.name}. Generate a better response.`;
break;
}
return {
...originalRequest,
prompt: refinementPrompt,
metadata: {
...originalRequest.metadata,
refinementIteration: true,
targetImprovement: weakestCriterion.name,
},
};
}
findWeakestCriterion(result, criteria) {
let weakest = criteria[0];
let weakestScore = weakest.evaluate(result);
for (const criterion of criteria) {
const score = criterion.evaluate(result);
if (score < weakestScore) {
weakest = criterion;
weakestScore = score;
}
}
return weakest;
}
performFinalEvaluation(result, criteria, iterations) {
return {
criteria: criteria.map(c => ({
name: c.name,
weight: c.weight,
score: c.evaluate(result),
})),
convergence: this.calculateConvergence(iterations),
efficiency: iterations.length > 0 ? iterations[0].quality / iterations.length : 0,
};
}
calculateConvergence(iterations) {
if (iterations.length < 2)
return 1;
const improvements = [];
for (let i = 1; i < iterations.length; i++) {
improvements.push(iterations[i].quality - iterations[i - 1].quality);
}
const avgImprovement = improvements.reduce((a, b) => a + b, 0) / improvements.length;
return Math.max(0, Math.min(1, avgImprovement * 10));
}
calculateImprovements(iterations) {
const improvements = [];
for (let i = 1; i < iterations.length; i++) {
improvements.push({
from: i,
to: i + 1,
qualityDelta: iterations[i].quality - iterations[i - 1].quality,
percentImprovement: ((iterations[i].quality - iterations[i - 1].quality) / iterations[i - 1].quality) * 100,
});
}
return improvements;
}
}
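/*
* Iterative config sketch (assumed shape, inferred from IterativeHandler above): weighted
* `evaluationCriteria` produce the quality score, and `refinementStrategy` selects between
* 'improve', 'critique', and 'regenerate'. The criteria below are illustrative heuristics.
*
*   const iterativeConfig = {
*     maxIterations: 4,
*     qualityThreshold: 0.9,
*     refinementStrategy: 'improve',
*     evaluationCriteria: [
*       { name: 'length', weight: 0.5, evaluate: r => Math.min(1, (r.content || '').length / 500) },
*       { name: 'structure', weight: 0.5, evaluate: r => ((r.content || '').includes('\n') ? 1 : 0.5) },
*     ],
*   };
*/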
class BranchingHandler extends PatternHandler {
async execute(request, config, onProgress) {
const branches = config?.branches || [];
const defaultBranch = config?.defaultBranch;
const evaluationOrder = config?.evaluationOrder || 'sequential';
const context = {
request,
timestamp: Date.now(),
environment: process.env,
previousResults: [],
};
onProgress(10);
// Sort branches by priority (copy first so the supplied config is not mutated)
const sortedBranches = [...branches].sort((a, b) => b.priority - a.priority);
let selectedBranch = null;
let evaluationResults = [];
if (evaluationOrder === 'sequential') {
// Evaluate branches sequentially
for (const branch of sortedBranches) {
onProgress(10 + (sortedBranches.indexOf(branch) / sortedBranches.length) * 30);
const evaluation = await this.evaluateBranch(branch, context);
evaluationResults.push(evaluation);
if (evaluation.matches) {
selectedBranch = branch;
break;
}
}
}
else {
// Evaluate branches in parallel
const evaluations = await Promise.all(sortedBranches.map(branch => this.evaluateBranch(branch, context)));
evaluationResults = evaluations;
// Find first matching branch (already sorted by priority)
for (let i = 0; i < sortedBranches.length; i++) {
if (evaluations[i].matches) {
selectedBranch = sortedBranches[i];
break;
}
}
onProgress(40);
}
// Use default branch if no conditions matched
if (!selectedBranch && defaultBranch) {
selectedBranch = branches.find(b => b.id === defaultBranch);
}
if (!selectedBranch) {
return {
success: false,
content: 'No branch conditions matched and no default branch specified',
metadata: {
pattern: 'branching',
evaluationResults,
},
};
}
onProgress(50);
// Execute selected branch workflow
const branchResult = await this.executeBranchWorkflow(selectedBranch.workflow, request, progress => onProgress(50 + progress * 0.4));
onProgress(100);
return {
success: true,
content: branchResult,
metadata: {
pattern: 'branching',
selectedBranch: selectedBranch.id,
evaluationOrder,
evaluationResults,
branchMetadata: branchResult.metadata,
},
};
}
async evaluateBranch(branch, context) {
try {
// String conditions are delegated to the model; check the type before calling,
// since invoking a string as a function would throw
if (typeof branch.condition === 'string') {
const evaluationRequest = {
prompt: `Evaluate if this condition is true: ${branch.condition}\nContext: ${JSON.stringify(context)}`,
maxTokens: 10,
};
const result = await this.modelClient.synthesize(evaluationRequest);
const evaluation = result.content.toLowerCase().includes('true');
return {
branchId: branch.id,
matches: evaluation,
evaluation: result.content,
};
}
// Function conditions are evaluated directly against the branching context
const matches = branch.condition(context);
return {
branchId: branch.id,
matches,
evaluation: matches ? 'Condition met' : 'Condition not met',
};
}
catch (error) {
return {
branchId: branch.id,
matches: false,
evaluation: `Error: ${getErrorMessage(error)}`,
};
}
}
async executeBranchWorkflow(workflow, request, onProgress) {
// Execute the workflow associated with the branch
const orchestrator = new WorkflowOrchestrator(this.modelClient);
return await orchestrator.executePattern(workflow.type, request, workflow.config);
}
}
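/*
* Branching config sketch (assumed shape, inferred from BranchingHandler above): branches
* are evaluated in priority order (or in parallel) and the first match has its nested
* workflow executed; `defaultBranch` names the fallback. Branch ids, conditions, and
* nested workflows are illustrative.
*
*   const branchingConfig = {
*     evaluationOrder: 'sequential',
*     defaultBranch: 'general',
*     branches: [
*       {
*         id: 'code',
*         priority: 2,
*         condition: ctx => /function|class|import/.test(ctx.request.prompt),
*         workflow: { type: WorkflowPatternType.ITERATIVE, config: { maxIterations: 2 } },
*       },
*       {
*         id: 'general',
*         priority: 1,
*         condition: () => true,
*         workflow: { type: WorkflowPatternType.SEQUENTIAL, config: { steps: [] } },
*       },
*     ],
*   };
*/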
class StreamingHandler extends PatternHandler {
streamBuffer = [];
processedChunks = [];
async execute(request, config, onProgress) {
const streamingEnabled = config?.streamingEnabled !== false;
const chunkSize = config?.chunkSize || 100;
const bufferStrategy = config?.bufferStrategy || 'sliding';
const onChunk = config?.onChunk || (chunk => { });
if (!streamingEnabled) {
// Non-streaming execution
const result = await this.modelClient.synthesize(request);
onProgress(100);
return {
success: true,
content: result,
metadata: { pattern: 'streaming', streamed: false },
};
}
// Initialize buffer based on strategy
const buffer = this.createBuffer(bufferStrategy, chunkSize * 10);
// Create streaming request
const streamRequest = {
...request,
stream: true,
};
onProgress(10);
try {
// Get stream from model
const streamResponse = await this.modelClient.streamRequest(streamRequest, token => {
// Process streaming token
if (token.content) {
buffer.add(token.content);
}
}, { workingDirectory: '.', config: {}, files: [] });
let totalChunks = 0;
let accumulatedContent = '';
const startTime = Date.now();
// The streaming is handled by the onToken callback above
// streamResponse contains the final result
totalChunks = 1;
accumulatedContent = streamResponse.content || '';
// Process final result
const processed = await this.processChunk(accumulatedContent, buffer, config);
this.processedChunks.push(processed);
// Call chunk handler
onChunk(processed);
// Update progress
const estimatedProgress = Math.min(90, 10 + totalChunks * 2);
onProgress(estimatedProgress);
// Apply backpressure if needed
if (buffer.isFull()) {
await this.applyBackpressure(buffer);
}
// Final processing
const finalResult = await this.finalizeStream(accumulatedContent, this.processedChunks, buffer);
onProgress(100);
return {
success: true,
content: finalResult,
metadata: {
pattern: 'streaming',
streamed: true,
totalChunks,
bufferStrategy,
processingTime: Date.now() - startTime,
averageChunkSize: accumulatedContent.length / totalChunks,
bufferStats: buffer.getStats(),
},
};
}
catch (error) {
return {
success: false,
content: `Streaming error: ${getErrorMessage(error)}`,
metadata: {
pattern: 'streaming',
error: getErrorMessage(error),
chunksProcessed: this.processedChunks.length,
},
};
}
}
createBuffer(strategy, maxSize) {
switch (strategy) {
case 'window':
return new WindowBuffer(maxSize);
case 'sliding':
return new SlidingBuffer(maxSize);
case 'circular':
return new CircularBuffer(maxSize);
default:
return new SlidingBuffer(maxSize);
}
}
async processChunk(chunk, buffer, config) {
// Process individual chunk
const processed = {
content: chunk.text || chunk.content || chunk,
timestamp: Date.now(),
size: JSON.stringify(chunk).length,
metadata: chunk.metadata || {},
};
// Apply any transformations
if (config?.transform) {
processed.content = config.transform(processed.content);
}
return processed;
}
async applyBackpressure(buffer) {
// Wait for buffer to have space
const waitTime = Math.min(100, buffer.size() / 10);
await new Promise(resolve => setTimeout(resolve, waitTime));
// Process some items from buffer
const toProcess = Math.min(10, buffer.size() / 4);
for (let i = 0; i < toProcess; i++) {
buffer.remove();
}
}
async finalizeStream(content, chunks, buffer) {
// Final processing of accumulated stream
return {
content,
chunks: chunks.length,
finalBuffer: buffer.getAll(),
};
}
}
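/*
* Streaming config sketch (assumed shape, inferred from StreamingHandler above); the
* chunk handler and transform below are illustrative.
*
*   const streamingConfig = {
*     streamingEnabled: true,
*     chunkSize: 100,
*     bufferStrategy: 'sliding', // 'window' and 'circular' are also supported
*     onChunk: chunk => process.stdout.write(String(chunk.content)),
*     transform: text => String(text).trim(),
*   };
*/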
// Buffer implementations
class StreamBuffer {
buffer = [];
maxSize;
constructor(maxSize) {
this.maxSize = maxSize;
}
size() {
return this.buffer.length;
}
getAll() {
return [...this.buffer];
}
getStats() {
return {
size: this.buffer.length,
maxSize: this.maxSize,
utilization: this.buffer.length / this.maxSize,
};
}
}
class SlidingBuffer extends StreamBuffer {
add(item) {
this.buffer.push(item);
if (this.buffer.length > this.maxSize) {
this.buffer.shift();
}
}
remove() {
return this.buffer.shift();
}
isFull() {
return this.buffer.length >= this.maxSize;
}
}
class WindowBuffer extends StreamBuffer {
windowStart = 0;
add(item) {
if (this.buffer.length >= this.maxSize) {
this.buffer = [];
this.windowStart += this.maxSize;
}
this.buffer.push(item);
}
remove() {
return this.buffer.shift();
}
isFull() {
return this.buffer.length >= this.maxSize;
}
}
class CircularBuffer extends StreamBuffer {
writeIndex