UNPKG

neuronetwork

Version:

A hybrid math & neural engine with classic functions and AI computations

178 lines (151 loc) 4.48 kB
// Example usage of the NeuroNetwork hybrid math & neural engine.
// NOTE(review): NeuroMath's exact API contracts are not visible from this
// file; parameter shapes below follow the package's own examples.
import NeuroNetwork from './index.esm.js';

const { NeuroMath } = NeuroNetwork;

// ── Optimizer functions ─────────────────────────────────────────────────────

const weights = [
  0.5, -1.0, // output neuron 1
  1.5, 2.0,  // output neuron 2
];
// FIX: gradients/m/v must carry one entry per weight (4, not 3) so the
// optimizers update every weight consistently.
const gradients = [0.01, 0.02, -0.01, 0.02];
const m = [0, 0, 0, 0]; // Adam first-moment accumulator (one slot per weight)
const v = [0, 0, 0, 0]; // Adam second-moment accumulator (one slot per weight)
const lr = 0.01;

// Adam with the conventional defaults (beta1=0.9, beta2=0.999, eps=1e-8).
const result = await NeuroMath.Optimizer.Adam({
  weights,
  gradients,
  m,
  v,
  lr: 0.001,
  beta1: 0.9,
  beta2: 0.999,
  epsilon: 1e-8,
  timestep: 1,
});
console.log("🔻 Adam | Updated Weights:", result);

// Plain SGD: w ← w − lr · g (presumably; verify against the library source).
const updated = await NeuroMath.Optimizer.SGD({ weights, gradients, learningRate: lr });
console.log("🔻 SGD | Updated Weights:", updated);

// ── Layer functions ─────────────────────────────────────────────────────────

const input = [1, 2]; // shape: [2]
const input2D = [
  1, 2,
  3, 4,
  5, 6,
]; // shape: [3 x 2]
const kernel = [
  1, 0, -1,
  1, 0, -1,
  1, 0, -1,
]; // 3x3 filter
const input5X = [
  1, 2, 3, 4, 5,
  6, 7, 8, 9, 10,
  11, 12, 13, 14, 15,
  16, 17, 18, 19, 20,
  21, 22, 23, 24, 25,
]; // 5x5 image
const embeddingInput = [0, 2]; // token indices into embeddingMatrix
const embeddingMatrix = [
  0.1, 0.2, 0.3,
  0.4, 0.5, 0.6,
  0.7, 0.8, 0.9,
]; // flat [vocabSize x embeddingDim]
const vocabSize = 3;
const embeddingDim = 3;
const bias = [0.1, -0.3];
const rate = 0.25; // dropout rate; never reassigned, so const (was `let`)
const gamma = [1, 1, 1];
const beta = [0, 0, 0];
const epsilon = 1e-5;

// 🔹 BatchNormalization
const output = await NeuroMath.Layer.BatchNormalization({ input, gamma, beta, epsilon });
console.log("🔻 BatchNormalization Output:", output);
// 🔻 BatchNormalization Output: [ -0.9999799728393555, 0.9999799728393555 ]

// 🔹 Embedding — looks up rows 0 and 2 of embeddingMatrix
const embeddingOutput = await NeuroMath.Layer.Embedding({
  input: embeddingInput,
  embeddingMatrix,
  vocabSize,
  embeddingDim,
});
console.log("🔻 Embedding Output:", embeddingOutput);
// 🔻 Embedding Output: [
//   0.10000000149011612,
//   0.20000000298023224,
//   0.30000001192092896,
//   0.699999988079071,
//   0.800000011920929,
//   0.8999999761581421
// ]

// 🔹 Conv2D — 5x5 input, 3x3 kernel, stride 1, no padding → 3x3 output
const Conv2D = await NeuroMath.Layer.Conv2D({
  input: input5X,
  inputWidth: 5,
  inputHeight: 5,
  kernel,
  kernelSize: 3,
  stride: 1,
  padding: 0,
});
console.log("🔻 Conv2D Output:", Conv2D);
// 🔻 Conv2D Output: [
//   -6, -6, -6, -6, -6,
//   -6, -6, -6, -6
// ]

// 🔹 Flatten
const flattened = await NeuroMath.Layer.Flatten({ input: input2D });
console.log("🔻 Flatten Output:", flattened);

// 🔹 Dropout (stochastic: zeroes ~25% of inputs)
const dropped = await NeuroMath.Layer.Dropout({ input, rate });
console.log("🔻 Dropout Output:", dropped);

// 🔹 Dense (2-in / 2-out: weights is flat [2 x 2], bias has 2 entries)
const denseOutput = await NeuroMath.Layer.Dense({ input, weights, bias });
console.log("🔻 Dense Output:", await NeuroMath.Activation.ReLu({ input: denseOutput }));

// ── Loss functions ──────────────────────────────────────────────────────────

const predicted = [0.9, 0.8];
const actual = [1, 1];

// MSE — NOTE(review): this call takes `target` while the other losses take
// `actual`; kept as the package documents it, but verify against the API.
const lossMSE = await NeuroMath.Loss.MSE({ predicted, target: actual });
// BCE (FIX: comment previously read "BSE")
const lossBCE = await NeuroMath.Loss.BCE({ predicted, actual });
// CCE
const lossCCE = await NeuroMath.Loss.CCE({ predicted, actual });
// MAE
const lossMAE = await NeuroMath.Loss.MAE({ predicted, actual });

console.log("🔻 MSE:", lossMSE); // 🔻 MSE: 0.02500000037252903
console.log("🔻 BCE:", lossBCE); // 🔻 BCE: 0.16425204277038574
console.log("🔻 CCE:", lossCCE); // 🔻 CCE: 0.3285040855407715
console.log("🔻 MAE:", lossMAE); // 🔻 MAE: 0.15000000596046448

// ── Activation functions ────────────────────────────────────────────────────
// FIX: every call below previously passed `{ inputAct }` — shorthand for a
// key named `inputAct` — while the API reads `input` (see the Dense example
// above), so each activation received `input: undefined`.

const inputAct = [1.0, 2.0, 3.0];
const ReLu = await NeuroMath.Activation.ReLu({ input: inputAct });
const LeakyReLU = await NeuroMath.Activation.LeakyReLU({ input: inputAct });
const sigmoid = await NeuroMath.Activation.Sigmoid({ input: inputAct });
const tanh = await NeuroMath.Activation.Tanh({ input: inputAct });
const softmax = await NeuroMath.Activation.Softmax({ input: inputAct });
const Swish = await NeuroMath.Activation.Swish({ input: inputAct });
const GELU = await NeuroMath.Activation.GELU({ input: inputAct });
const ELU = await NeuroMath.Activation.ELU({ input: inputAct });

console.log("ReLu:", ReLu);
console.log("LeakyReLU:", LeakyReLU);
console.log("Sigmoid:", sigmoid);
console.log("Tanh:", tanh);
console.log("Softmax:", softmax);
console.log("Swish:", Swish);
console.log("GELU:", GELU);
console.log("ELU:", ELU);