/*
 * @jsmlt/jsmlt — JavaScript Machine Learning Toolkit
 * Transpiled (Babel → ES5 CommonJS) neural-network classifier module.
 */
// ES-module interop: mark this transpiled CommonJS module as an ES module and
// pre-declare the NN export (assigned further down).
;
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.NN = undefined;
// Babel helper: array destructuring of iterables, used for the `[a, b] = fn()` patterns below.
var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();
// Babel helper: shallow merge of own enumerable properties (Object.assign fallback),
// used to merge user options over the defaults in the constructor.
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
// Babel helper: attaches the prototype/static members generated from ES2015 class methods.
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
// Internal project dependencies: the Classifier base class and the array utilities.
var _base = require('../base');
var _arrays = require('../../arrays');
// Namespace object for the array helpers (zeros, random, shuffle, argmax, ...).
var Arrays = _interopRequireWildcard(_arrays);
// Babel helper: emulates `import * as ns` for a CommonJS module.
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }
// Babel helper: throws if a transpiled class constructor is invoked without `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
// Babel helper: resolves what a derived-class constructor returns after calling super().
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
// Babel helper: wires up the prototype chain for `class NN extends Classifier`.
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } // Internal dependencies
/**
 * Logistic sigmoid function: 1 / (1 + e^(-x)).
 *
 * NOTE: the original comment called this the "logit function", which is
 * incorrect — this is the logistic (sigmoid) function, the *inverse* of
 * the logit.
 *
 * @param {number} x - Input number
 * @return {number} Sigmoid of the input, in the open interval (0, 1)
 */
function sigmoid(x) {
  return 1 / (1 + Math.exp(-x));
}
var NN = exports.NN = function (_Classifier) {
_inherits(NN, _Classifier);
/**
* Constructor. Initialize class members and store user-defined options.
*
* @param {Object} [optionsUser] User-defined options
* @param {trackAccuracy} [optionsUser.trackAccuracy = false] Whether to track accuracy during the
* training process. This will let the perceptron keep track of the error rate on the test set
* in each training iteration
*/
function NN() {
var optionsUser = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
_classCallCheck(this, NN);
// Parse options
var _this = _possibleConstructorReturn(this, (NN.__proto__ || Object.getPrototypeOf(NN)).call(this));
var optionsDefault = {
numInputs: 10,
numOutputs: 2,
numEpochs: 20
};
var options = _extends({}, optionsDefault, optionsUser);
_this.numInputs = options.numInputs;
_this.numOutputs = options.numOutputs;
_this.numEpochs = options.numEpochs;
// Initialize to empty layers
_this.layers = [];
_this.weights = [];
return _this;
}
/**
* Randomly initialize the weights for the neural network. For each subsequent pair of layers,
* where the first has n nodes and the second n' nodes, initialize an matrix with n rows and n'
* columns. Each cell in the matrix is assigned a random value in the range [-1, 1].
*
* The weights between layer k and layer k + 1 are stored in element k (starting at k = 0) of the
* weights array.
*/
_createClass(NN, [{
key: 'initializeWeights',
value: function initializeWeights() {
// Initialize weights for each subsequent pair of layers
for (var i = 0; i < this.layers.length - 1; i++) {
// Initialize weights from this layer to the next layer to a random real number in the
// range [0, 1]
this.weights.push(Arrays.random([this.layers[i].length, this.layers[i + 1].length], -1, 1));
}
}
}, {
key: 'train',
value: function train(X, y) {
// Initialize weights arrays
this.initializeWeights();
// Train for specified number of epochs
for (var i = 0; i < this.numEpochs; i++) {
this.trainEpoch(X, y);
}
}
}, {
key: 'trainEpoch',
value: function trainEpoch(X, y) {
// Shuffle data points
var _Arrays$shuffle = Arrays.shuffle(X, y),
_Arrays$shuffle2 = _slicedToArray(_Arrays$shuffle, 2),
XUse = _Arrays$shuffle2[0],
yUse = _Arrays$shuffle2[1];
// Train for each sample individually
for (var i = 0; i < XUse.length; i += 1) {
this.trainSample(XUse[i].slice(), y[i]);
}
}
}, {
key: 'trainSample',
value: function trainSample(x, y) {
var _this2 = this;
// Pass the sample through the network
var _forwardPass = forwardPass(x),
_forwardPass2 = _slicedToArray(_forwardPass, 2),
activations = _forwardPass2[0],
outputs = _forwardPass2[1];
// Calculate deltas using the generalized delta rule
var deltas = this.layers.map(function (x) {
return Arrays.zeros(x);
});
// Start at the final layer, and calculate deltas going backward until the second layer
var _loop = function _loop(k) {
var _loop2 = function _loop2(_i) {
// Extract output and activation for this node
activation = activations[k][_i];
output = outputs[k][_i];
// Last layer
if (k == _this2.layers.length - 1) {
var target = y == _i;
deltas[k][_i] = activationFunctionDerivative(activation) * (output - target);
}
// Earlier layers
else {
// Calculate sum of weighted deltas in next layer
var nextDeltaSum = deltas[k + 1].reduce(function (r, a, j) {
return r + a * _this2.weights[k][_i][j];
}, 0);
deltas[k][_i] = activationFunctionDerivative(activation) * nextDeltaSum;
}
};
for (var _i = 0; _i < _this2.layers[k]; _i++) {
_loop2(_i);
}
};
for (var k = this.layers.length - 1; k > 0; k--) {
_loop(k);
}
// Update weights
for (var k = 0; k < this.layers.length - 1; k++) {
// Loop over all pairs of nodes in layers k and k + 1
for (var i = 0; i < this.layers[k]; i++) {
for (var j = 0; j < this.layers[k + 1]; j++) {
// Update weights
this.weights[k][i][j] -= this.learningRate * outputs[k][i][j] * deltas[k + 1][j];
}
}
}
}
/**
* Pass a sample through the network, calculating the activations and outputs for all nodes in the
* network.
*
* @param {Array.<number>} x - Data point features
* @return {Array} - Array with two elements: containing the activations and outputs,
* respectively, for each node in the network
*/
}, {
key: 'forwardPass',
value: function forwardPass(x) {
var _this3 = this;
if (x.length != this.numInputs) {
throw new Error('Number of features of samples should match the number of network inputs.');
}
// Output and activations of nodes in each layer
outputs = this.layers.map(function (x) {
return Arrays.zeros(x);
});
activations = this.layers.map(function (x) {
return Arrays.zeros(x);
});
// Fill the outputs of the first layer with the sample features, and initialize the activations
// of the first layer to an empty list
outputs[0] = x.slice();
activations[0] = [];
// Propagate the inputs layer-by-layer
var _loop3 = function _loop3(layer) {
var _loop4 = function _loop4(node) {
// Calculate the activation as the weighted sum of the inputs in the previous layer
activations[layer][node] = outputs[layer - 1].reduce(function (r, a, i) {
return r + a * _this3.weights[layer - 1][i][node];
}, 0);
// Calculate the output of this node by applying the activation function to the activation
outputs[layer][node] = _this3.activationFunction(activations[layer][node]);
};
// Calculate the activation and output of each node in the layer
for (var node = 0; node < _this3.layers[layer]; node++) {
_loop4(node);
}
};
for (var layer = 1; layer < this.layers.length; layer++) {
_loop3(layer);
}
return [activations, outputs];
}
}, {
key: 'activationFunction',

/**
 * Apply the network's activation function (the logistic sigmoid) to a node activation.
 *
 * @param {number} a - Node activation (weighted input sum)
 * @return {number} Node output, in the open interval (0, 1)
 */
value: function activationFunction(a) {
return sigmoid(a);
}
}, {
key: 'activationFunctionDerivative',
value: function activationFunctionDerivative(a) {
return sigmoid(a) * (1 - sigmoid(a));
}
}, {
key: 'addLayer',

/**
 * Append a layer of the given size to the network. Layers are stored as node
 * counts; add all layers (input, hidden, output, in order) before calling train.
 *
 * @param {number} numNodes - Number of nodes in the new layer
 */
value: function addLayer(numNodes) {
this.layers.push(numNodes);
}

/**
 * Make a prediction for a data set.
 *
 * @param {Array.<Array.<number>>} X - Data set to make predictions for
 * @return {Array.<number>} Predictions, one per sample
 */
}, {
key: 'predict',
value: function predict(X) {
var _this4 = this;
return X.map(function (x) {
return Arrays.argmax(_this4.forwardPass(x));
});
}
}]);
return NN;
}(_base.Classifier);