// claude-flow
// Version:
// Ruflo - Enterprise AI agent orchestration for Claude Code. Deploy 60+ specialized agents in coordinated swarms with self-learning, fault-tolerant consensus, vector memory, and MCP integration
// 497 lines • 15.3 kB
// JavaScript
/**
* RuVector Training Service
* Real WASM-accelerated neural training using @ruvector packages
*
* Features:
* - MicroLoRA: <1µs adaptation with rank-2 LoRA (2.3M ops/s)
* - SONA: Self-Optimizing Neural Architecture (624k learn/s, 60k search/s)
* - Flash Attention: 2.49x-7.47x speedup (9k ops/s)
* - Trajectory Buffer: Learning from success/failure
* - Contrastive Learning: InfoNCE loss
*
* Backward Compatible: All v1 APIs preserved, SONA adds new capabilities
*
* Created with ❤️ by ruv.io
*/
// Lazy-loaded WASM modules.
// Every handle starts as null and is populated by initializeTraining();
// each exported function guards on the handle it needs before use.
let microLoRA = null; // learning-wasm WasmMicroLoRA instance
let scopedLoRA = null; // per-operator WasmScopedLoRA (see OperatorType, 0-16)
let trajectoryBuffer = null; // WasmTrajectoryBuffer for success/failure records
let flashAttention = null; // attention.FlashAttention (enabled unless opted out)
let moeAttention = null; // attention.MoEAttention (only with config.useMoE)
let hyperbolicAttention = null; // attention.HyperbolicAttention (only with config.useHyperbolic)
let optimizer = null; // attention.AdamWOptimizer
let contrastiveLoss = null; // attention.InfoNceLoss
let curriculum = null; // attention.CurriculumScheduler (only when totalSteps given)
let hardMiner = null; // attention.HardNegativeMiner (best-effort; may stay null)
// SONA engine (optional enhancement)
let sonaEngine = null; // @ruvector/sona SonaEngine, when the package is available
let sonaAvailable = false; // true only after SonaEngine construction succeeded
// Training state
let initialized = false; // set true only after a fully successful init
let totalAdaptations = 0; // adapt/reward updates across both LoRA paths
let totalForwards = 0; // forward() invocations
let totalSonaLearns = 0; // sonaForceLearn() invocations
let totalSonaSearches = 0; // sonaFindPatterns() invocations
let lastBenchmark = null; // result of the most recent benchmarkTraining()
/**
 * Initialize the RuVector training system.
 *
 * Lazily loads the @ruvector WASM/NAPI modules and wires up the module-level
 * training state. Failures are captured and reported in the returned object
 * rather than thrown, so callers can degrade gracefully when the native
 * packages are unavailable.
 *
 * @param {object} [config] - Optional settings: dim (capped at 256 for WASM),
 *   learningRate, alpha, trajectoryCapacity, totalSteps, warmupSteps,
 *   useFlashAttention, useMoE, useHyperbolic, useSona, sonaRank.
 * @returns {Promise<{success: boolean, features: string[], error?: string}>}
 *   Enabled feature descriptions, plus an error message on failure.
 */
export async function initializeTraining(config = {}) {
    const features = [];
    // Use ?? (not ||) so explicitly-provided falsy-but-valid values such as
    // alpha: 0 or learningRate: 0 are honored instead of silently replaced.
    const dim = Math.min(config.dim ?? 256, 256); // Max 256 for WASM
    const lr = config.learningRate ?? 0.01;
    const alpha = config.alpha ?? 0.1;
    try {
        // Initialize MicroLoRA with direct WASM loading (Node.js compatible)
        const fs = await import('fs');
        const { createRequire } = await import('module');
        const require = createRequire(import.meta.url);
        // Load WASM file directly instead of using fetch
        const wasmPath = require.resolve('@ruvector/learning-wasm/ruvector_learning_wasm_bg.wasm');
        const wasmBuffer = fs.readFileSync(wasmPath);
        const learningWasm = await import('@ruvector/learning-wasm');
        learningWasm.initSync({ module: wasmBuffer });
        microLoRA = new learningWasm.WasmMicroLoRA(dim, alpha, lr);
        features.push(`MicroLoRA (${dim}-dim, <1μs adaptation)`);
        // Initialize ScopedLoRA for per-operator learning
        scopedLoRA = new learningWasm.WasmScopedLoRA(dim, alpha, lr);
        scopedLoRA.set_category_fallback(true);
        features.push('ScopedLoRA (17 operators)');
        // Initialize trajectory buffer
        trajectoryBuffer = new learningWasm.WasmTrajectoryBuffer(config.trajectoryCapacity ?? 10000, dim);
        features.push('TrajectoryBuffer');
        // Initialize attention mechanisms
        const attention = await import('@ruvector/attention');
        if (config.useFlashAttention !== false) {
            flashAttention = new attention.FlashAttention(dim, 64);
            features.push('FlashAttention');
        }
        if (config.useMoE) {
            moeAttention = attention.MoEAttention.simple(dim, 8, 2);
            features.push('MoE (8 experts, top-2)');
        }
        if (config.useHyperbolic) {
            hyperbolicAttention = new attention.HyperbolicAttention(dim, 1.0);
            features.push('HyperbolicAttention');
        }
        // Initialize optimizer and loss
        optimizer = new attention.AdamWOptimizer(lr, 0.9, 0.999, 1e-8, 0.01);
        features.push('AdamW Optimizer');
        contrastiveLoss = new attention.InfoNceLoss(0.07);
        features.push('InfoNCE Loss');
        // Curriculum scheduler — only when a training horizon was provided
        if (config.totalSteps) {
            curriculum = new attention.CurriculumScheduler(config.totalSteps, config.warmupSteps ?? Math.floor(config.totalSteps * 0.1));
            features.push('Curriculum Learning');
        }
        // Hard negative mining - use string for MiningStrategy enum due to NAPI binding quirk
        try {
            hardMiner = new attention.HardNegativeMiner(5, 'semi_hard');
            features.push('Hard Negative Mining');
        }
        catch {
            // Mining not available, continue without it (deliberate best-effort)
        }
        // Initialize SONA (optional, backward compatible)
        if (config.useSona !== false) {
            try {
                const sona = await import('@ruvector/sona');
                const sonaRank = config.sonaRank ?? 4;
                // SonaEngine constructor: (dim, rank, alpha, learningRate) - TypeScript types are wrong
                // @ts-expect-error - SonaEngine accepts 4 positional args but types say 1
                sonaEngine = new sona.SonaEngine(dim, sonaRank, alpha, lr);
                sonaAvailable = true;
                features.push(`SONA (${dim}-dim, rank-${sonaRank}, 624k learn/s)`);
            }
            catch (sonaError) {
                // SONA not available, continue without it (backward compatible)
                sonaAvailable = false;
                // Only log if explicitly requested
                if (config.useSona === true) {
                    console.warn('SONA requested but not available:', sonaError);
                }
            }
        }
        initialized = true;
        return { success: true, features };
    }
    catch (error) {
        // Report the failure instead of throwing; features lists whatever
        // succeeded before the error.
        return {
            success: false,
            features,
            error: error instanceof Error ? error.message : String(error),
        };
    }
}
/**
 * Operator types for scoped LoRA (0-16).
 * Frozen so the shared enum cannot be mutated at runtime.
 */
export const OperatorType = Object.freeze({
    GENERAL: 0,
    ATTENTION: 1,
    MLP: 2,
    EMBEDDING: 3,
    NORMALIZATION: 4,
    PROJECTION: 5,
    POOLING: 6,
    CONVOLUTION: 7,
    RECURRENT: 8,
    ROUTING: 9,
    MEMORY: 10,
    REASONING: 11,
    COORDINATION: 12,
    OPTIMIZATION: 13,
    SECURITY: 14,
    TESTING: 15,
    DEBUGGING: 16,
});
/**
 * Train a pattern with MicroLoRA.
 *
 * Applies a gradient update to the per-operator ScopedLoRA when an
 * operatorType is given (and scoped learning is available), otherwise to the
 * global MicroLoRA adapter.
 *
 * @param {Float32Array|number[]} embedding - Pattern embedding (not consumed by
 *   the adaptation call itself; kept for API compatibility).
 * @param {Float32Array|number[]} gradient - Gradient vector to apply.
 * @param {number} [operatorType] - Optional OperatorType index (0-16).
 * @returns {Promise<{deltaNorm: number, adaptCount: number}>} Post-update stats.
 * @throws {Error} If the training system has not been initialized.
 */
export async function trainPattern(embedding, gradient, operatorType) {
    if (!initialized || !microLoRA) {
        throw new Error('Training system not initialized');
    }
    // Use scoped LoRA if operator type specified
    if (operatorType !== undefined && scopedLoRA) {
        scopedLoRA.adapt_array(operatorType, gradient);
        // Fix: count scoped adaptations too, matching adaptWithReward(), so
        // getTrainingStats().totalAdaptations reflects every update.
        totalAdaptations++;
        return {
            deltaNorm: scopedLoRA.delta_norm(operatorType),
            adaptCount: scopedLoRA.adapt_count(operatorType),
        };
    }
    // Standard MicroLoRA adaptation
    microLoRA.adapt_array(gradient);
    totalAdaptations++;
    return {
        deltaNorm: microLoRA.delta_norm(),
        adaptCount: microLoRA.adapt_count(),
    };
}
/**
 * Run a forward pass through the active LoRA adapter.
 *
 * @param {Float32Array|number[]} input - Input vector.
 * @param {number} [operatorType] - Optional OperatorType index; routes through
 *   the scoped adapter when provided and available.
 * @returns {*} The adapter's forward_array result.
 * @throws {Error} If the training system has not been initialized.
 */
export function forward(input, operatorType) {
    if (!initialized || !microLoRA) {
        throw new Error('Training system not initialized');
    }
    totalForwards++;
    const useScoped = scopedLoRA && operatorType !== undefined;
    return useScoped
        ? scopedLoRA.forward_array(operatorType, input)
        : microLoRA.forward_array(input);
}
/**
 * Reward-based adaptation (reinforcement learning).
 *
 * Feeds an improvement signal into the scoped adapter when an operatorType is
 * given, otherwise into the global MicroLoRA adapter.
 *
 * @param {number} improvement - Reward / improvement signal.
 * @param {number} [operatorType] - Optional OperatorType index (0-16).
 * @throws {Error} If the training system has not been initialized.
 */
export function adaptWithReward(improvement, operatorType) {
    if (!initialized) {
        throw new Error('Training system not initialized');
    }
    const scoped = scopedLoRA && operatorType !== undefined;
    if (scoped) {
        scopedLoRA.adapt_with_reward(operatorType, improvement);
    } else if (microLoRA) {
        microLoRA.adapt_with_reward(improvement);
    }
    totalAdaptations++;
}
/**
 * Record a learning trajectory in the trajectory buffer.
 *
 * @param {Float32Array|number[]} embedding - Pattern embedding.
 * @param {number} operatorType - OperatorType index.
 * @param {number} attentionType - Attention mechanism identifier.
 * @param {number} executionMs - Observed execution time (ms).
 * @param {number} baselineMs - Baseline execution time (ms) for comparison.
 * @throws {Error} If the trajectory buffer has not been initialized.
 */
export function recordTrajectory(embedding, operatorType, attentionType, executionMs, baselineMs) {
    if (trajectoryBuffer === null) {
        throw new Error('Trajectory buffer not initialized');
    }
    trajectoryBuffer.record(embedding, operatorType, attentionType, executionMs, baselineMs);
}
/**
 * Get trajectory statistics from the trajectory buffer.
 *
 * @returns {object|null} Aggregate stats, or null when the buffer is missing
 *   or empty.
 */
export function getTrajectoryStats() {
    const buf = trajectoryBuffer;
    if (!buf || buf.is_empty()) {
        return null;
    }
    return {
        successRate: buf.success_rate(),
        meanImprovement: buf.mean_improvement(),
        bestImprovement: buf.best_improvement(),
        totalCount: buf.total_count(),
        highQualityCount: buf.high_quality_count(0.1), // 0.1 = quality threshold passed to WASM
        variance: buf.variance(),
    };
}
/**
 * Compute attention with Flash Attention (2.49x-7.47x faster).
 *
 * @param {*} query - Query vector(s).
 * @param {*} keys - Key vector(s).
 * @param {*} values - Value vector(s).
 * @returns {*} Raw attention output from the WASM kernel.
 * @throws {Error} If Flash Attention was not initialized.
 */
export function computeFlashAttention(query, keys, values) {
    if (flashAttention === null) {
        throw new Error('Flash attention not initialized');
    }
    return flashAttention.computeRaw(query, keys, values);
}
/**
 * Compute MoE (mixture-of-experts) attention routing.
 *
 * @param {*} query - Query vector(s).
 * @param {*} keys - Key vector(s).
 * @param {*} values - Value vector(s).
 * @returns {*} Raw attention output from the MoE kernel.
 * @throws {Error} If MoE attention was not initialized (requires config.useMoE).
 */
export function computeMoEAttention(query, keys, values) {
    if (moeAttention === null) {
        throw new Error('MoE attention not initialized');
    }
    return moeAttention.computeRaw(query, keys, values);
}
/**
 * Compute hyperbolic attention (for hierarchical patterns).
 *
 * @param {*} query - Query vector(s).
 * @param {*} keys - Key vector(s).
 * @param {*} values - Value vector(s).
 * @returns {*} Raw attention output from the hyperbolic kernel.
 * @throws {Error} If hyperbolic attention was not initialized
 *   (requires config.useHyperbolic).
 */
export function computeHyperbolicAttention(query, keys, values) {
    if (hyperbolicAttention === null) {
        throw new Error('Hyperbolic attention not initialized');
    }
    return hyperbolicAttention.computeRaw(query, keys, values);
}
/**
 * Compute contrastive (InfoNCE) loss and its gradient for training.
 *
 * @param {*} anchor - Anchor embedding.
 * @param {*} positives - Positive examples.
 * @param {*} negatives - Negative examples.
 * @returns {{loss: *, gradient: *}} Loss value and gradient w.r.t. the anchor.
 * @throws {Error} If the contrastive loss module was not initialized.
 */
export function computeContrastiveLoss(anchor, positives, negatives) {
    if (contrastiveLoss === null) {
        throw new Error('Contrastive loss not initialized');
    }
    // Object literal evaluates in order: compute() runs before backward(),
    // matching the original call sequence.
    return {
        loss: contrastiveLoss.compute(anchor, positives, negatives),
        gradient: contrastiveLoss.backward(anchor, positives, negatives),
    };
}
/**
 * Apply one AdamW optimizer step.
 *
 * @param {*} params - Current parameters.
 * @param {*} gradients - Gradients for this step.
 * @returns {*} Updated parameters from the optimizer.
 * @throws {Error} If the optimizer was not initialized.
 */
export function optimizerStep(params, gradients) {
    if (optimizer === null) {
        throw new Error('Optimizer not initialized');
    }
    return optimizer.step(params, gradients);
}
/**
 * Get curriculum difficulty for the current step.
 *
 * @param {number} step - Training step index.
 * @returns {number} Difficulty in [0, 1]; 1.0 (full difficulty) when no
 *   curriculum scheduler is configured.
 */
export function getCurriculumDifficulty(step) {
    return curriculum ? curriculum.getDifficulty(step) : 1.0;
}
/**
 * Mine hard negatives for better contrastive training.
 *
 * @param {*} anchor - Anchor embedding.
 * @param {*} candidates - Candidate negatives to mine from.
 * @returns {*} Mined hard negatives.
 * @throws {Error} If the hard negative miner was not initialized (it is
 *   best-effort and may be unavailable).
 */
export function mineHardNegatives(anchor, candidates) {
    if (hardMiner === null) {
        throw new Error('Hard negative miner not initialized');
    }
    return hardMiner.mine(anchor, candidates);
}
/**
 * Benchmark the attention/training kernels and cache the result.
 *
 * @param {number} [dim] - Embedding dimension (defaults to 256).
 * @param {number} [iterations] - Benchmark iterations (defaults to 1000).
 * @returns {Promise<*>} The benchmark result (cached in lastBenchmark), or []
 *   when the benchmark produced nothing.
 */
export async function benchmarkTraining(dim, iterations) {
    const attention = await import('@ruvector/attention');
    const result = attention.benchmarkAttention(dim || 256, 100, iterations || 1000);
    lastBenchmark = result;
    return result ?? [];
}
// ============================================
// SONA Functions (v2 enhancement, optional)
// ============================================
/**
 * Check whether SONA is initialized and usable.
 *
 * @returns {boolean} True only when the engine exists and was flagged available.
 */
export function isSonaAvailable() {
    return sonaEngine !== null && sonaAvailable;
}
/**
 * Force-learn a pattern with SONA (1.6μs, 624k ops/s).
 * One-shot learning mechanism for immediate pattern storage.
 *
 * @param {*} embedding - Pattern embedding.
 * @param {number} reward - Reward associated with the pattern.
 * @throws {Error} If SONA was not initialized.
 */
export function sonaForceLearn(embedding, reward) {
    if (sonaEngine === null) {
        throw new Error('SONA not initialized. Call initializeTraining with useSona: true');
    }
    sonaEngine.forceLearn(embedding, reward);
    totalSonaLearns += 1;
}
/**
 * Search for similar patterns with SONA (16.7μs, 60k searches/s).
 * Returns the k most similar patterns from the pattern bank.
 *
 * @param {Float32Array|number[]} embedding - Query embedding.
 * @param {number} [k=5] - Number of neighbors to return.
 * @returns {*} Matching patterns from the SONA pattern bank.
 * @throws {Error} If SONA was not initialized.
 */
export function sonaFindPatterns(embedding, k = 5) {
    if (sonaEngine === null) {
        throw new Error('SONA not initialized. Call initializeTraining with useSona: true');
    }
    totalSonaSearches += 1;
    // The SONA binding expects a plain Array, not a Float32Array.
    return sonaEngine.findPatterns([...embedding], k);
}
/**
 * Process SONA background tasks (0.13μs, 7.5M ticks/s).
 * Call periodically to drive background learning and consolidation.
 * Silent no-op when SONA is not available.
 */
export function sonaTick() {
    sonaEngine?.tick();
}
/**
 * Get SONA statistics.
 *
 * @returns {{available: boolean, enabled: boolean, stats: object|null,
 *   totalLearns: number, totalSearches: number}} Engine stats; `stats` is null
 *   when the engine is missing or its stats JSON cannot be parsed.
 */
export function getSonaStats() {
    // Counters are tracked in this module even when the engine is absent.
    const counters = {
        totalLearns: totalSonaLearns,
        totalSearches: totalSonaSearches,
    };
    if (!sonaEngine) {
        return { available: false, enabled: false, stats: null, ...counters };
    }
    try {
        const stats = JSON.parse(sonaEngine.getStats());
        return { available: true, enabled: sonaEngine.isEnabled(), stats, ...counters };
    } catch {
        // getStats()/JSON.parse failed — report the engine as present but idle.
        return { available: true, enabled: false, stats: null, ...counters };
    }
}
/**
 * Enable/disable SONA learning. No-op when SONA is unavailable.
 *
 * @param {boolean} enabled - Desired learning state.
 */
export function setSonaEnabled(enabled) {
    sonaEngine?.setEnabled(enabled);
}
/**
 * Flush SONA buffers (persist any pending patterns).
 * No-op when SONA is unavailable.
 */
export function sonaFlush() {
    sonaEngine?.flush();
}
/**
 * Get training statistics across all subsystems.
 *
 * Always includes the module counters; per-subsystem sections are attached
 * only for components that are initialized (and, for trajectories, non-empty).
 *
 * @returns {object} Aggregated statistics snapshot.
 */
export function getTrainingStats() {
    const snapshot = {
        initialized,
        totalAdaptations,
        totalForwards,
    };
    if (microLoRA) {
        snapshot.microLoraStats = {
            paramCount: microLoRA.param_count(),
            adaptCount: microLoRA.adapt_count(),
            forwardCount: microLoRA.forward_count(),
            deltaNorm: microLoRA.delta_norm(),
        };
    }
    if (scopedLoRA) {
        snapshot.scopedLoraStats = {
            totalAdaptCount: scopedLoRA.total_adapt_count(),
            totalForwardCount: scopedLoRA.total_forward_count(),
        };
    }
    if (trajectoryBuffer && !trajectoryBuffer.is_empty()) {
        snapshot.trajectoryStats = getTrajectoryStats();
    }
    // Include SONA stats if available
    if (sonaAvailable) {
        snapshot.sonaStats = getSonaStats();
    }
    if (lastBenchmark) {
        snapshot.lastBenchmark = lastBenchmark;
    }
    return snapshot;
}
/**
 * Reset the training system: clear adapter deltas, the trajectory buffer, and
 * the module counters, while keeping all subsystems initialized.
 */
export function resetTraining() {
    microLoRA?.reset();
    scopedLoRA?.reset_all();
    trajectoryBuffer?.reset();
    // SONA has no reset API; flushing pending patterns is the closest equivalent.
    sonaEngine?.flush();
    totalAdaptations = 0;
    totalForwards = 0;
    totalSonaLearns = 0;
    totalSonaSearches = 0;
}
/**
 * Export a summary of the trained MicroLoRA weights.
 *
 * @returns {{dim: number, deltaNorm: number, adaptCount: number,
 *   trajectoryStats: object|null}|null} Summary, or null when the system has
 *   not been initialized.
 */
export function exportWeights() {
    const lora = microLoRA;
    if (!initialized || !lora) {
        return null;
    }
    return {
        dim: lora.dim(),
        deltaNorm: lora.delta_norm(),
        adaptCount: lora.adapt_count(),
        trajectoryStats: getTrajectoryStats(),
    };
}
/**
 * Cleanup resources and return the module to its pre-init state.
 * WASM-backed handles are freed explicitly (they own native memory);
 * NAPI-backed handles are simply dropped for the GC.
 */
export function cleanup() {
    microLoRA?.free();
    microLoRA = null;
    scopedLoRA?.free();
    scopedLoRA = null;
    trajectoryBuffer?.free();
    trajectoryBuffer = null;
    // SONA: flush pending patterns before dropping the engine reference.
    sonaEngine?.flush();
    sonaEngine = null;
    sonaAvailable = false;
    flashAttention = null;
    moeAttention = null;
    hyperbolicAttention = null;
    optimizer = null;
    contrastiveLoss = null;
    curriculum = null;
    hardMiner = null;
    initialized = false;
    totalAdaptations = 0;
    totalForwards = 0;
    totalSonaLearns = 0;
    totalSonaSearches = 0;
    lastBenchmark = null;
}
//# sourceMappingURL=ruvector-training.js.map