synaptic — architecture-free neural network library
Version: 1.1.1
1,435 lines (1,186 loc) • 108 kB
JavaScript
/*!
* The MIT License (MIT)
*
* Copyright (c) 2017 Juan Cazala - https://caza.la
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE
*
*
*
* ********************************************************************************************
* SYNAPTIC (v1.1.1)
* ********************************************************************************************
*
* Synaptic is a javascript neural network library for node.js and the browser, its generalized
* algorithm is architecture-free, so you can build and train basically any type of first order
* or even second order neural network architectures.
*
* http://en.wikipedia.org/wiki/Recurrent_neural_network#Second_Order_Recurrent_Neural_Network
*
* The library includes a few built-in architectures like multilayer perceptrons, multilayer
* long-short term memory networks (LSTM) or liquid state machines, and a trainer capable of
* training any given network, and includes built-in training tasks/tests like solving an XOR,
* passing a Distracted Sequence Recall test or an Embeded Reber Grammar test.
*
* The algorithm implemented by this library has been taken from Derek D. Monner's paper:
*
*
* A generalized LSTM-like training algorithm for second-order recurrent neural networks
* http://www.overcomplete.net/papers/nn2012.pdf
*
* There are references to the equations in that paper commented through the source code.
*
*/
(function webpackUniversalModuleDefinition(root, factory) {
if(typeof exports === 'object' && typeof module === 'object')
module.exports = factory();
else if(typeof define === 'function' && define.amd)
define([], factory);
else if(typeof exports === 'object')
exports["synaptic"] = factory();
else
root["synaptic"] = factory();
})(this, function() {
return /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ // (i = module id, l = "loaded" flag, exports = the module's public object)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/ // (only defines the getter once; repeated definitions are skipped)
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, {
/******/ configurable: false,
/******/ enumerable: true,
/******/ get: getter
/******/ });
/******/ }
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/ // Load entry module (module id 4) and return exports
/******/ return __webpack_require__(__webpack_require__.s = 4);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
// Babel helper: installs prototype/static members from descriptor arrays.
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
// Bundled sibling modules, referenced by webpack module id.
var _LayerConnection = __webpack_require__(6);
var _LayerConnection2 = _interopRequireDefault(_LayerConnection);
var _Neuron = __webpack_require__(2);
var _Neuron2 = _interopRequireDefault(_Neuron);
var _Network = __webpack_require__(1);
var _Network2 = _interopRequireDefault(_Network);
// Babel interop: wraps CommonJS exports so `.default` access works uniformly.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Babel guard: transpiled classes must be invoked with `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
// types of connections — layout strategies used by Layer.project/LayerConnection
var connectionType = {
ALL_TO_ALL: "ALL TO ALL",
ONE_TO_ONE: "ONE TO ONE",
ALL_TO_ELSE: "ALL TO ELSE"
};
// types of gates — how a gater layer is matched to a connection (see Layer.gate)
var gateType = {
INPUT: "INPUT",
OUTPUT: "OUTPUT",
ONE_TO_ONE: "ONE TO ONE"
};
var Layer = function () {
function Layer(size) {
  _classCallCheck(this, Layer);
  // Coerce to an integer; undefined/NaN becomes 0.
  this.size = size | 0;
  this.list = [];
  this.connectedTo = [];
  // Fill the layer with `size` freshly created neurons.
  var remaining = size;
  while (remaining--) {
    this.list.push(new _Neuron2.default());
  }
}
// activates all the neurons in the layer
_createClass(Layer, [{
key: 'activate',
value: function activate(input) {
var activations = [];
if (typeof input != 'undefined') {
if (input.length != this.size) throw new Error('INPUT size and LAYER size must be the same to activate!');
for (var id in this.list) {
var neuron = this.list[id];
var activation = neuron.activate(input[id]);
activations.push(activation);
}
} else {
for (var id in this.list) {
var neuron = this.list[id];
var activation = neuron.activate();
activations.push(activation);
}
}
return activations;
}
// propagates the error on all the neurons of the layer
}, {
key: 'propagate',
value: function propagate(rate, target) {
if (typeof target != 'undefined') {
if (target.length != this.size) throw new Error('TARGET size and LAYER size must be the same to propagate!');
for (var id = this.list.length - 1; id >= 0; id--) {
var neuron = this.list[id];
neuron.propagate(rate, target[id]);
}
} else {
for (var id = this.list.length - 1; id >= 0; id--) {
var neuron = this.list[id];
neuron.propagate(rate);
}
}
}
// projects a connection from this layer to another one
}, {
key: 'project',
value: function project(layer, type, weights) {
if (layer instanceof _Network2.default) layer = layer.layers.input;
if (layer instanceof Layer) {
if (!this.connected(layer)) return new _LayerConnection2.default(this, layer, type, weights);
} else throw new Error('Invalid argument, you can only project connections to LAYERS and NETWORKS!');
}
// gates a connection betwenn two layers
}, {
key: 'gate',
value: function gate(connection, type) {
// Makes this layer gate the given LayerConnection.
// INPUT: neuron i of this layer gates the incoming connections of
// neuron i in connection.to (layer sizes must match).
if (type == Layer.gateType.INPUT) {
if (connection.to.size != this.size) throw new Error('GATER layer and CONNECTION.TO layer must be the same size in order to gate!');
for (var id in connection.to.list) {
var neuron = connection.to.list[id];
var gater = this.list[id];
for (var input in neuron.connections.inputs) {
var gated = neuron.connections.inputs[input];
// Only gate connections belonging to this LayerConnection.
if (gated.ID in connection.connections) gater.gate(gated);
}
}
// OUTPUT: neuron i gates the outgoing (projected) connections of
// neuron i in connection.from.
} else if (type == Layer.gateType.OUTPUT) {
if (connection.from.size != this.size) throw new Error('GATER layer and CONNECTION.FROM layer must be the same size in order to gate!');
for (var id in connection.from.list) {
var neuron = connection.from.list[id];
var gater = this.list[id];
for (var projected in neuron.connections.projected) {
var gated = neuron.connections.projected[projected];
if (gated.ID in connection.connections) gater.gate(gated);
}
}
// ONE_TO_ONE: neuron i gates connection i directly.
} else if (type == Layer.gateType.ONE_TO_ONE) {
if (connection.size != this.size) throw new Error('The number of GATER UNITS must be the same as the number of CONNECTIONS to gate!');
for (var id in connection.list) {
var gater = this.list[id];
var gated = connection.list[id];
gater.gate(gated);
}
}
// Record which layer gates this connection (consumed by Network.toDot).
connection.gatedfrom.push({ layer: this, type: type });
}
// true or false whether the whole layer is self-connected or not
}, {
key: 'selfconnected',
value: function selfconnected() {
for (var id in this.list) {
var neuron = this.list[id];
if (!neuron.selfconnected()) return false;
}
return true;
}
// true of false whether the layer is connected to another layer (parameter) or not
}, {
key: 'connected',
value: function connected(layer) {
// Check if ALL to ALL connection
var connections = 0;
for (var here in this.list) {
for (var there in layer.list) {
var from = this.list[here];
var to = layer.list[there];
var connected = from.connected(to);
if (connected.type == 'projected') connections++;
}
}
if (connections == this.size * layer.size) return Layer.connectionType.ALL_TO_ALL;
// Check if ONE to ONE connection
connections = 0;
for (var neuron in this.list) {
var from = this.list[neuron];
var to = layer.list[neuron];
var connected = from.connected(to);
if (connected.type == 'projected') connections++;
}
if (connections == this.size) return Layer.connectionType.ONE_TO_ONE;
}
// clears all the neuorns in the layer
}, {
key: 'clear',
value: function clear() {
for (var id in this.list) {
var neuron = this.list[id];
neuron.clear();
}
}
// resets all the neurons in the layer
}, {
key: 'reset',
value: function reset() {
for (var id in this.list) {
var neuron = this.list[id];
neuron.reset();
}
}
// returns all the neurons in the layer (array)
}, {
key: 'neurons',
value: function neurons() {
// Accessor for the layer's backing neuron array (the live array, not a copy).
return this.list;
}
// adds a neuron to the layer
}, {
key: 'add',
value: function add(neuron) {
this.neurons[neuron.ID] = neuron || new _Neuron2.default();
this.list.push(neuron);
this.size++;
}
}, {
key: 'set',
value: function set(options) {
options = options || {};
for (var i in this.list) {
var neuron = this.list[i];
if (options.label) neuron.label = options.label + '_' + neuron.ID;
if (options.squash) neuron.squash = options.squash;
if (options.bias) neuron.bias = options.bias;
}
return this;
}
}]);
return Layer;
}();
Layer.connectionType = connectionType;
Layer.gateType = gateType;
exports.default = Layer;
/***/ }),
/* 1 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
// Babel helper: Symbol-safe replacement for the `typeof` operator.
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
// Babel helper: installs prototype/static members from descriptor arrays.
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
// Bundled sibling modules, referenced by webpack module id.
var _Neuron = __webpack_require__(2);
var _Neuron2 = _interopRequireDefault(_Neuron);
var _Layer = __webpack_require__(0);
var _Layer2 = _interopRequireDefault(_Layer);
var _Trainer = __webpack_require__(3);
var _Trainer2 = _interopRequireDefault(_Trainer);
// Babel interop: wraps CommonJS exports so `.default` access works uniformly.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Babel guard: transpiled classes must be invoked with `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var Network = function () {
function Network(layers) {
  _classCallCheck(this, Network);
  // Note: when no layers are given, neither `layers` nor `optimized` is
  // initialized — matches the historical behaviour of this constructor.
  if (typeof layers == 'undefined') return;
  this.layers = {
    input: layers.input || null,
    hidden: layers.hidden || [],
    output: layers.output || null
  };
  this.optimized = null;
}
// feed-forward activation of all the layers to produce an ouput
_createClass(Network, [{
key: 'activate',
value: function activate(input) {
if (this.optimized === false) {
this.layers.input.activate(input);
for (var i = 0; i < this.layers.hidden.length; i++) {
this.layers.hidden[i].activate();
}return this.layers.output.activate();
} else {
if (this.optimized == null) this.optimize();
return this.optimized.activate(input);
}
}
// back-propagate the error thru the network
}, {
key: 'propagate',
value: function propagate(rate, target) {
if (this.optimized === false) {
this.layers.output.propagate(rate, target);
for (var i = this.layers.hidden.length - 1; i >= 0; i--) {
this.layers.hidden[i].propagate(rate);
}
} else {
if (this.optimized == null) this.optimize();
this.optimized.propagate(rate, target);
}
}
// project a connection to another unit (either a network or a layer)
}, {
key: 'project',
value: function project(unit, type, weights) {
if (this.optimized) this.optimized.reset();
if (unit instanceof Network) return this.layers.output.project(unit.layers.input, type, weights);
if (unit instanceof _Layer2.default) return this.layers.output.project(unit, type, weights);
throw new Error('Invalid argument, you can only project connections to LAYERS and NETWORKS!');
}
// let this network gate a connection
}, {
key: 'gate',
value: function gate(connection, type) {
// Gating changes the topology, so any optimized build is invalidated;
// the actual gating is delegated to the output layer.
if (this.optimized) this.optimized.reset();
this.layers.output.gate(connection, type);
}
// clear all elegibility traces and extended elegibility traces (the network forgets its context, but not what was trained)
}, {
key: 'clear',
value: function clear() {
this.restore();
var inputLayer = this.layers.input,
outputLayer = this.layers.output;
inputLayer.clear();
for (var i = 0; i < this.layers.hidden.length; i++) {
this.layers.hidden[i].clear();
}
outputLayer.clear();
if (this.optimized) this.optimized.reset();
}
// reset all weights and clear all traces (ends up like a new network)
}, {
key: 'reset',
value: function reset() {
this.restore();
var inputLayer = this.layers.input,
outputLayer = this.layers.output;
inputLayer.reset();
for (var i = 0; i < this.layers.hidden.length; i++) {
this.layers.hidden[i].reset();
}
outputLayer.reset();
if (this.optimized) this.optimized.reset();
}
// hardcodes the behaviour of the whole network into a single optimized function
}, {
key: 'optimize',
value: function optimize() {
// Compiles the whole network into one flat program: hardcoded activate/
// propagate functions operating over a single Float64Array, installed
// directly on `this`. restore()/reset() undo the substitution.
var that = this;
var optimized = {};
var neurons = this.neurons();
// Let every neuron contribute its code "sentences" and variables.
for (var i = 0; i < neurons.length; i++) {
var neuron = neurons[i].neuron;
var layer = neurons[i].layer;
// Unwrap optimizer proxies down to the underlying neuron.
while (neuron.neuron) {
neuron = neuron.neuron;
}optimized = neuron.optimize(optimized, layer);
}
// Propagation must run in reverse activation order.
for (var i = 0; i < optimized.propagation_sentences.length; i++) {
optimized.propagation_sentences[i].reverse();
}optimized.propagation_sentences.reverse();
// Assemble the source of the hardcoded network.
var hardcode = '';
hardcode += 'var F = Float64Array ? new Float64Array(' + optimized.memory + ') : []; ';
for (var i in optimized.variables) {
hardcode += 'F[' + optimized.variables[i].id + '] = ' + (optimized.variables[i].value || 0) + '; ';
}hardcode += 'var activate = function(input){\n';
for (var i = 0; i < optimized.inputs.length; i++) {
hardcode += 'F[' + optimized.inputs[i] + '] = input[' + i + ']; ';
}for (var i = 0; i < optimized.activation_sentences.length; i++) {
if (optimized.activation_sentences[i].length > 0) {
for (var j = 0; j < optimized.activation_sentences[i].length; j++) {
hardcode += optimized.activation_sentences[i][j].join(' ');
hardcode += optimized.trace_sentences[i][j].join(' ');
}
}
}
hardcode += ' var output = []; ';
for (var i = 0; i < optimized.outputs.length; i++) {
hardcode += 'output[' + i + '] = F[' + optimized.outputs[i] + ']; ';
}hardcode += 'return output; }; ';
hardcode += 'var propagate = function(rate, target){\n';
hardcode += 'F[' + optimized.variables.rate.id + '] = rate; ';
for (var i = 0; i < optimized.targets.length; i++) {
hardcode += 'F[' + optimized.targets[i] + '] = target[' + i + ']; ';
}for (var i = 0; i < optimized.propagation_sentences.length; i++) {
for (var j = 0; j < optimized.propagation_sentences[i].length; j++) {
hardcode += optimized.propagation_sentences[i][j].join(' ') + ' ';
}
}hardcode += ' };\n';
hardcode += 'var ownership = function(memoryBuffer){\nF = memoryBuffer;\nthis.memory = F;\n};\n';
hardcode += 'return {\nmemory: F,\nactivate: activate,\npropagate: propagate,\nownership: ownership\n};';
hardcode = hardcode.split(';').join(';\n');
// NOTE(review): generated source is evaluated via new Function — only
// acceptable because the sentences come from this library, not user input.
var constructor = new Function(hardcode);
var network = constructor();
// Keep the build products around for restore()/standalone()/worker(),
// plus the original methods so reset() can reinstate them.
network.data = {
variables: optimized.variables,
activate: optimized.activation_sentences,
propagate: optimized.propagation_sentences,
trace: optimized.trace_sentences,
inputs: optimized.inputs,
outputs: optimized.outputs,
check_activation: this.activate,
check_propagation: this.propagate
};
// Undo the optimization and restore the object-graph methods.
network.reset = function () {
if (that.optimized) {
that.optimized = null;
that.activate = network.data.check_activation;
that.propagate = network.data.check_propagation;
}
};
this.optimized = network;
this.activate = network.activate;
this.propagate = network.propagate;
}
// restores all the values from the optimized network the their respective objects in order to manipulate the network
}, {
key: 'restore',
value: function restore() {
// Copies values from the optimized build's flat memory back onto the
// neuron/connection objects so the network can be inspected or mutated.
if (!this.optimized) return;
var optimized = this.optimized;
// Rebuilds the flat variable key the optimizer used for (unit, props...)
// and looks it up in the optimized memory; unknown keys yield 0.
var getValue = function getValue() {
var args = Array.prototype.slice.call(arguments);
var unit = args.shift();
var prop = args.pop();
var id = prop + '_';
for (var property in args) {
id += args[property] + '_';
}id += unit.ID;
var memory = optimized.memory;
var variables = optimized.data.variables;
if (id in variables) return memory[variables[id].id];
return 0;
};
var list = this.neurons();
// link id's to positions in the array
for (var i = 0; i < list.length; i++) {
var neuron = list[i].neuron;
// Unwrap optimizer proxies down to the underlying neuron.
while (neuron.neuron) {
neuron = neuron.neuron;
}neuron.state = getValue(neuron, 'state');
neuron.old = getValue(neuron, 'old');
neuron.activation = getValue(neuron, 'activation');
neuron.bias = getValue(neuron, 'bias');
// Restore eligibility traces and extended (per-gated-neuron) traces.
for (var input in neuron.trace.elegibility) {
neuron.trace.elegibility[input] = getValue(neuron, 'trace', 'elegibility', input);
}for (var gated in neuron.trace.extended) {
for (var input in neuron.trace.extended[gated]) {
neuron.trace.extended[gated][input] = getValue(neuron, 'trace', 'extended', gated, input);
}
} // get connections
for (var j in neuron.connections.projected) {
var connection = neuron.connections.projected[j];
connection.weight = getValue(connection, 'weight');
connection.gain = getValue(connection, 'gain');
}
}
}
// returns all the neurons in the network
}, {
key: 'neurons',
value: function neurons() {
var neurons = [];
var inputLayer = this.layers.input.neurons(),
outputLayer = this.layers.output.neurons();
for (var i = 0; i < inputLayer.length; i++) {
neurons.push({
neuron: inputLayer[i],
layer: 'input'
});
}
for (var i = 0; i < this.layers.hidden.length; i++) {
var hiddenLayer = this.layers.hidden[i].neurons();
for (var j = 0; j < hiddenLayer.length; j++) {
neurons.push({
neuron: hiddenLayer[j],
layer: i
});
}
}
for (var i = 0; i < outputLayer.length; i++) {
neurons.push({
neuron: outputLayer[i],
layer: 'output'
});
}
return neurons;
}
// returns number of inputs of the network
}, {
key: 'inputs',
value: function inputs() {
// Number of neurons in the input layer.
return this.layers.input.size;
}
// returns number of outputs of hte network
}, {
key: 'outputs',
value: function outputs() {
// Number of neurons in the output layer.
return this.layers.output.size;
}
// sets the layers of the network
}, {
key: 'set',
value: function set(layers) {
this.layers = {
input: layers.input || null,
hidden: layers.hidden || [],
output: layers.output || null
};
if (this.optimized) this.optimized.reset();
}
}, {
key: 'setOptimize',
value: function setOptimize(bool) {
// Enables or permanently disables optimization:
// null  -> re-optimize lazily on the next activate/propagate,
// false -> always run the object graph directly.
this.restore();
if (this.optimized) this.optimized.reset();
this.optimized = bool ? null : false;
}
// returns a json that represents all the neurons and connections of the network
}, {
key: 'toJSON',
value: function toJSON(ignoreTraces) {
// Serializes neurons and connections to a plain object (see fromJSON).
// NOTE(review): the `ignoreTraces` parameter is never read in this body.
this.restore();
var list = this.neurons();
var neurons = [];
var connections = [];
// link id's to positions in the array
var ids = {};
for (var i = 0; i < list.length; i++) {
var neuron = list[i].neuron;
// Unwrap optimizer proxies down to the underlying neuron.
while (neuron.neuron) {
neuron = neuron.neuron;
}ids[neuron.ID] = i;
var copy = {
trace: {
elegibility: {},
extended: {}
},
state: neuron.state,
old: neuron.old,
activation: neuron.activation,
bias: neuron.bias,
layer: list[i].layer
};
// Serialize the squash function by name; unknown functions become null.
copy.squash = neuron.squash == _Neuron2.default.squash.LOGISTIC ? 'LOGISTIC' : neuron.squash == _Neuron2.default.squash.TANH ? 'TANH' : neuron.squash == _Neuron2.default.squash.IDENTITY ? 'IDENTITY' : neuron.squash == _Neuron2.default.squash.HLIM ? 'HLIM' : neuron.squash == _Neuron2.default.squash.RELU ? 'RELU' : null;
neurons.push(copy);
}
// Second pass: record projections (and self-connections) using the
// positional ids computed above.
for (var i = 0; i < list.length; i++) {
var neuron = list[i].neuron;
while (neuron.neuron) {
neuron = neuron.neuron;
}for (var j in neuron.connections.projected) {
var connection = neuron.connections.projected[j];
connections.push({
from: ids[connection.from.ID],
to: ids[connection.to.ID],
weight: connection.weight,
gater: connection.gater ? ids[connection.gater.ID] : null
});
}
// Self-connections are emitted as entries with from == to.
if (neuron.selfconnected()) {
connections.push({
from: ids[neuron.ID],
to: ids[neuron.ID],
weight: neuron.selfconnection.weight,
gater: neuron.selfconnection.gater ? ids[neuron.selfconnection.gater.ID] : null
});
}
}
return {
neurons: neurons,
connections: connections
};
}
// export the topology into dot language which can be visualized as graphs using dot
/* example: ... console.log(net.toDotLang());
$ node example.js > example.dot
$ dot example.dot -Tpng > out.png
*/
}, {
key: 'toDot',
value: function toDot(edgeConnection) {
if (!(typeof edgeConnection === 'undefined' ? 'undefined' : _typeof(edgeConnection))) edgeConnection = false;
var code = 'digraph nn {\n rankdir = BT\n';
var layers = [this.layers.input].concat(this.layers.hidden, this.layers.output);
for (var i = 0; i < layers.length; i++) {
for (var j = 0; j < layers[i].connectedTo.length; j++) {
// projections
var connection = layers[i].connectedTo[j];
var layerTo = connection.to;
var size = connection.size;
var layerID = layers.indexOf(layers[i]);
var layerToID = layers.indexOf(layerTo);
/* http://stackoverflow.com/questions/26845540/connect-edges-with-graph-dot
* DOT does not support edge-to-edge connections
* This workaround produces somewhat weird graphs ...
*/
if (edgeConnection) {
if (connection.gatedfrom.length) {
var fakeNode = 'fake' + layerID + '_' + layerToID;
code += ' ' + fakeNode + ' [label = "", shape = point, width = 0.01, height = 0.01]\n';
code += ' ' + layerID + ' -> ' + fakeNode + ' [label = ' + size + ', arrowhead = none]\n';
code += ' ' + fakeNode + ' -> ' + layerToID + '\n';
} else code += ' ' + layerID + ' -> ' + layerToID + ' [label = ' + size + ']\n';
for (var from in connection.gatedfrom) {
// gatings
var layerfrom = connection.gatedfrom[from].layer;
var layerfromID = layers.indexOf(layerfrom);
code += ' ' + layerfromID + ' -> ' + fakeNode + ' [color = blue]\n';
}
} else {
code += ' ' + layerID + ' -> ' + layerToID + ' [label = ' + size + ']\n';
for (var from in connection.gatedfrom) {
// gatings
var layerfrom = connection.gatedfrom[from].layer;
var layerfromID = layers.indexOf(layerfrom);
code += ' ' + layerfromID + ' -> ' + layerToID + ' [color = blue]\n';
}
}
}
}
code += '}\n';
return {
code: code,
link: 'https://chart.googleapis.com/chart?chl=' + escape(code.replace('/ /g', '+')) + '&cht=gv'
};
}
// returns a function that works as the activation of the network and can be used without depending on the library
}, {
key: 'standalone',
value: function standalone() {
if (!this.optimized) this.optimize();
var data = this.optimized.data;
// build activation function
var activation = 'function (input) {\n';
// build inputs
for (var i = 0; i < data.inputs; i++) {
activation += 'F[' + data.inputs[i] + '] = input[' + i + '];\n';
} // build network activation
for (var i = 0; i < data.activate.length; i++) {
// shouldn't this be layer?
for (var j = 0; j < data.activate[i].length; j++) {
activation += data.activate[i][j].join('') + '\n';
}
}
// build outputs
activation += 'var output = [];\n';
for (var i = 0; i < data.outputs.length; i++) {
activation += 'output[' + i + '] = F[' + data.outputs[i] + '];\n';
}activation += 'return output;\n}';
// reference all the positions in memory
var memory = activation.match(/F\[(\d+)\]/g);
var dimension = 0;
var ids = {};
for (var i = 0; i < memory.length; i++) {
var tmp = memory[i].match(/\d+/)[0];
if (!(tmp in ids)) {
ids[tmp] = dimension++;
}
}
var hardcode = 'F = {\n';
for (var i in ids) {
hardcode += ids[i] + ': ' + this.optimized.memory[i] + ',\n';
}hardcode = hardcode.substring(0, hardcode.length - 2) + '\n};\n';
hardcode = 'var run = ' + activation.replace(/F\[(\d+)]/g, function (index) {
return 'F[' + ids[index.match(/\d+/)[0]] + ']';
}).replace('{\n', '{\n' + hardcode + '') + ';\n';
hardcode += 'return run';
// return standalone function
return new Function(hardcode)();
}
// Return a HTML5 WebWorker specialized on training the network stored in `memory`.
// Train based on the given dataSet and options.
// The worker returns the updated `memory` when done.
}, {
key: 'worker',
value: function worker(memory, set, options) {
// Copy the options and set defaults (options might be different for each worker)
var workerOptions = {};
if (options) workerOptions = options;
workerOptions.rate = options.rate || .2;
workerOptions.iterations = options.iterations || 100000;
workerOptions.error = options.error || .005;
workerOptions.cost = options.cost || null;
workerOptions.crossValidate = options.crossValidate || null;
// Cost function might be different for each worker
costFunction = 'var cost = ' + (options && options.cost || this.cost || _Trainer2.default.cost.MSE) + ';\n';
var workerFunction = Network.getWorkerSharedFunctions();
workerFunction = workerFunction.replace(/var cost = options && options\.cost \|\| this\.cost \|\| Trainer\.cost\.MSE;/g, costFunction);
// Set what we do when training is finished
workerFunction = workerFunction.replace('return results;', 'postMessage({action: "done", message: results, memoryBuffer: F}, [F.buffer]);');
// Replace log with postmessage
workerFunction = workerFunction.replace('console.log(\'iterations\', iterations, \'error\', error, \'rate\', currentRate)', 'postMessage({action: \'log\', message: {\n' + 'iterations: iterations,\n' + 'error: error,\n' + 'rate: currentRate\n' + '}\n' + '})');
// Replace schedule with postmessage
workerFunction = workerFunction.replace('abort = this.schedule.do({ error: error, iterations: iterations, rate: currentRate })', 'postMessage({action: \'schedule\', message: {\n' + 'iterations: iterations,\n' + 'error: error,\n' + 'rate: currentRate\n' + '}\n' + '})');
if (!this.optimized) this.optimize();
var hardcode = 'var inputs = ' + this.optimized.data.inputs.length + ';\n';
hardcode += 'var outputs = ' + this.optimized.data.outputs.length + ';\n';
hardcode += 'var F = new Float64Array([' + this.optimized.memory.toString() + ']);\n';
hardcode += 'var activate = ' + this.optimized.activate.toString() + ';\n';
hardcode += 'var propagate = ' + this.optimized.propagate.toString() + ';\n';
hardcode += 'onmessage = function(e) {\n' + 'if (e.data.action == \'startTraining\') {\n' + 'train(' + JSON.stringify(set) + ',' + JSON.stringify(workerOptions) + ');\n' + '}\n' + '}';
var workerSourceCode = workerFunction + '\n' + hardcode;
var blob = new Blob([workerSourceCode]);
var blobURL = window.URL.createObjectURL(blob);
return new Worker(blobURL);
}
// returns a copy of the network
}, {
key: 'clone',
value: function clone() {
return Network.fromJSON(this.toJSON());
}
/**
* Creates a static String to store the source code of the functions
* that are identical for all the workers (train, _trainSet, test)
*
* @return {String} Source code that can train a network inside a worker.
* @static
*/
}], [{
key: 'getWorkerSharedFunctions',
value: function getWorkerSharedFunctions() {
// Builds (once) the source of the worker-shared functions by stringifying
// the Trainer methods; the result is cached on the Network constructor.
// If we already computed the source code for the shared functions
if (typeof Network._SHARED_WORKER_FUNCTIONS !== 'undefined') return Network._SHARED_WORKER_FUNCTIONS;
// Otherwise compute and return the source code
// We compute them by simply copying the source code of the train, _trainSet and test functions
// using the .toString() method
// Load and name the train function
var train_f = _Trainer2.default.prototype.train.toString();
train_f = train_f.replace('function (set', 'function train(set') + '\n';
// Load and name the _trainSet function
// (this.network./this.crossValidate references are rewritten because the
// worker runs these functions as free-standing globals, not methods)
var _trainSet_f = _Trainer2.default.prototype._trainSet.toString().replace(/this.network./g, '');
_trainSet_f = _trainSet_f.replace('function (set', 'function _trainSet(set') + '\n';
_trainSet_f = _trainSet_f.replace('this.crossValidate', 'crossValidate');
_trainSet_f = _trainSet_f.replace('crossValidate = true', 'crossValidate = { }');
// Load and name the test function
var test_f = _Trainer2.default.prototype.test.toString().replace(/this.network./g, '');
test_f = test_f.replace('function (set', 'function test(set') + '\n';
return Network._SHARED_WORKER_FUNCTIONS = train_f + _trainSet_f + test_f;
}
}, {
key: 'fromJSON',
value: function fromJSON(json) {
// Rebuilds a Network from the object produced by toJSON().
var neurons = [];
var layers = {
input: new _Layer2.default(),
hidden: [],
output: new _Layer2.default()
};
// First pass: recreate the neurons and distribute them into layers.
for (var i = 0; i < json.neurons.length; i++) {
var config = json.neurons[i];
var neuron = new _Neuron2.default();
neuron.trace.elegibility = {};
neuron.trace.extended = {};
neuron.state = config.state;
neuron.old = config.old;
neuron.activation = config.activation;
neuron.bias = config.bias;
// Unknown squash names fall back to LOGISTIC.
neuron.squash = config.squash in _Neuron2.default.squash ? _Neuron2.default.squash[config.squash] : _Neuron2.default.squash.LOGISTIC;
neurons.push(neuron);
// Hidden layers are keyed by their serialized index and created on demand.
if (config.layer == 'input') layers.input.add(neuron);else if (config.layer == 'output') layers.output.add(neuron);else {
if (typeof layers.hidden[config.layer] == 'undefined') layers.hidden[config.layer] = new _Layer2.default();
layers.hidden[config.layer].add(neuron);
}
}
// Second pass: recreate projections and gates between the new neurons.
for (var i = 0; i < json.connections.length; i++) {
var config = json.connections[i];
var from = neurons[config.from];
var to = neurons[config.to];
var weight = config.weight;
var gater = neurons[config.gater];
var connection = from.project(to, weight);
if (gater) gater.gate(connection);
}
return new Network(layers);
}
}]);
return Network;
}();
exports.default = Network;
/***/ }),
/* 2 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
// Babel helper: installs the property descriptors produced by the class
// transform onto a constructor (prototype members) and/or the constructor
// itself (static members), then returns the constructor.
var _createClass = (function () {
  var install = function (target, props) {
    for (var idx = 0; idx < props.length; idx++) {
      var descriptor = props[idx];
      // descriptors default to non-enumerable but configurable
      descriptor.enumerable = descriptor.enumerable || false;
      descriptor.configurable = true;
      // data properties (as opposed to accessors) are made writable
      if ("value" in descriptor) descriptor.writable = true;
      Object.defineProperty(target, descriptor.key, descriptor);
    }
  };
  return function (Constructor, protoProps, staticProps) {
    if (protoProps) install(Constructor.prototype, protoProps);
    if (staticProps) install(Constructor, staticProps);
    return Constructor;
  };
})();
var _Connection = __webpack_require__(5);
var _Connection2 = _interopRequireDefault(_Connection);
// Normalizes a required module: native ES modules pass through untouched,
// while CommonJS exports get wrapped so they can be read via `.default`.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
// Babel guard: throws when a transpiled class constructor is invoked
// without the `new` operator.
function _classCallCheck(instance, Constructor) {
  var calledWithNew = instance instanceof Constructor;
  if (!calledWithNew) {
    throw new TypeError("Cannot call a class as a function");
  }
}
var neurons = 0;
// squashing functions
// Built-in squashing (activation) functions; pass derivate=true to get f'(x).
var squash = {
  // logistic sigmoid - eq. 5 & 5'
  LOGISTIC: function LOGISTIC(x, derivate) {
    var fx = 1 / (1 + Math.exp(-x));
    return derivate ? fx * (1 - fx) : fx;
  },
  // hyperbolic tangent
  TANH: function TANH(x, derivate) {
    if (!derivate) return Math.tanh(x);
    return 1 - Math.pow(Math.tanh(x), 2);
  },
  // linear pass-through
  IDENTITY: function IDENTITY(x, derivate) {
    if (derivate) return 1;
    return x;
  },
  // hard-threshold step function (derivative hardcoded to 1)
  HLIM: function HLIM(x, derivate) {
    if (derivate) return 1;
    return x > 0 ? 1 : 0;
  },
  // rectified linear unit
  RELU: function RELU(x, derivate) {
    if (derivate) return x > 0 ? 1 : 0;
    return x > 0 ? x : 0;
  }
};
var Neuron = function () {
// Creates a neuron with empty connection maps, zeroed traces/state,
// a disabled self-connection and a small random bias.
function Neuron() {
  _classCallCheck(this, Neuron);
  this.ID = Neuron.uid();
  // connections grouped by role: incoming, outgoing, and gated by this unit
  this.connections = { inputs: {}, projected: {}, gated: {} };
  // error responsibilities accumulated during backpropagation
  this.error = { responsibility: 0, projected: 0, gated: 0 };
  // (extended) eligibility traces used by the learning rule
  this.trace = { elegibility: {}, extended: {}, influences: {} };
  this.state = 0;
  this.old = 0;
  this.activation = 0;
  // weight 0 means the neuron starts out effectively not self-connected
  this.selfconnection = new _Connection2.default(this, this, 0);
  this.squash = Neuron.squash.LOGISTIC;
  this.neighboors = {};
  // random bias in [-0.1, 0.1)
  this.bias = Math.random() * .2 - .1;
}
// activate the neuron
_createClass(Neuron, [{
key: 'activate',
value: function activate(input) {
// activation from enviroment (for input neurons)
if (typeof input != 'undefined') {
this.activation = input;
this.derivative = 0;
this.bias = 0;
return this.activation;
}
// old state
this.old = this.state;
// eq. 15
this.state = this.selfconnection.gain * this.selfconnection.weight * this.state + this.bias;
for (var i in this.connections.inputs) {
var input = this.connections.inputs[i];
this.state += input.from.activation * input.weight * input.gain;
}
// eq. 16
this.activation = this.squash(this.state);
// f'(s)
this.derivative = this.squash(this.state, true);
// update traces
var influences = [];
for (var id in this.trace.extended) {
// extended elegibility trace
var neuron = this.neighboors[id];
// if gated neuron's selfconnection is gated by this unit, the influence keeps track of the neuron's old state
var influence = neuron.selfconnection.gater == this ? neuron.old : 0;
// index runs over all the incoming connections to the gated neuron that are gated by this unit
for (var incoming in this.trace.influences[neuron.ID]) {
// captures the effect that has an input connection to this unit, on a neuron that is gated by this unit
influence += this.trace.influences[neuron.ID][incoming].weight * this.trace.influences[neuron.ID][incoming].from.activation;
}
influences[neuron.ID] = influence;
}
for (var i in this.connections.inputs) {
var input = this.connections.inputs[i];
// elegibility trace - Eq. 17
this.trace.elegibility[input.ID] = this.selfconnection.gain * this.selfconnection.weight * this.trace.elegibility[input.ID] + input.gain * input.from.activation;
for (var id in this.trace.extended) {
// extended elegibility trace
var xtrace = this.trace.extended[id];
var neuron = this.neighboors[id];
var influence = influences[neuron.ID];
// eq. 18
xtrace[input.ID] = neuron.selfconnection.gain * neuron.selfconnection.weight * xtrace[input.ID] + this.derivative * this.trace.elegibility[input.ID] * influence;
}
}
// update gated connection's gains
for (var connection in this.connections.gated) {
this.connections.gated[connection].gain = this.activation;
}
return this.activation;
}
// back-propagate the error
}, {
key: 'propagate',
value: function propagate(rate, target) {
// error accumulator
var error = 0;
// whether or not this neuron is in the output layer
var isOutput = typeof target != 'undefined';
// output neurons get their error from the enviroment
if (isOutput) this.error.responsibility = this.error.projected = target - this.activation; // Eq. 10
else // the rest of the neuron compute their error responsibilities by backpropagation
{
// error responsibilities from all the connections projected from this neuron
for (var id in this.connections.projected) {
var connection = this.connections.projected[id];
var neuron = connection.to;
// Eq. 21
error += neuron.error.responsibility * connection.gain * connection.weight;
}
// projected error responsibility
this.error.projected = this.derivative * error;
error = 0;
// error responsibilities from all the connections gated by this neuron
for (var id in this.trace.extended) {
var neuron = this.neighboors[id]; // gated neuron
var influence = neuron.selfconnection.gater == this ? neuron.old : 0; // if gated neuron's selfconnection is gated by this neuron
// index runs over all the connections to the gated neuron that are gated by this neuron
for (var input in this.trace.influences[id]) {
// captures the effect that the input connection of this neuron have, on a neuron which its input/s is/are gated by this neuron
influence += this.trace.influences[id][input].weight * this.trace.influences[neuron.ID][input].from.activation;
}
// eq. 22
error += neuron.error.responsibility * influence;
}
// gated error responsibility
this.error.gated = this.derivative * error;
// error responsibility - Eq. 23
this.error.responsibility = this.error.projected + this.error.gated;
}
// learning rate
rate = rate || .1;
// adjust all the neuron's incoming connections
for (var id in this.connections.inputs) {
var input = this.connections.inputs[id];
// Eq. 24
var gradient = this.error.projected * this.trace.elegibility[input.ID];
for (var id in this.trace.extended) {
var neuron = this.neighboors[id];
gradient += neuron.error.responsibility * this.trace.extended[neuron.ID][input.ID];
}
input.weight += rate * gradient; // adjust weights - aka learn
}
// adjust bias
this.bias += rate * this.error.responsibility;
}
}, {
key: 'project',
value: function project(neuron, weight) {
// self-connection
if (neuron == this) {
this.selfconnection.weight = 1;
return this.selfconnection;
}
// check if connection already exists
var connected = this.connected(neuron);
if (connected && connected.type == 'projected') {
// update connection
if (typeof weight != 'undefined') connected.connection.weight = weight;
// return existing connection
return connected.connection;
} else {
// create a new connection
var connection = new _Connection2.default(this, neuron, weight);
}
// reference all the connections and traces
this.connections.projected[connection.ID] = connection;
this.neighboors[neuron.ID] = neuron;
neuron.connections.inputs[connection.ID] = connection;
neuron.trace.elegibility[connection.ID] = 0;
for (var id in neuron.trace.extended) {
var trace = neuron.trace.extended[id];
trace[connection.ID] = 0;
}
return connection;
}
}, {
key: 'gate',
value: function gate(connection) {
// add connection to gated list
this.connections.gated[connection.ID] = connection;
var neuron = connection.to;
if (!(neuron.ID in this.trace.extended)) {
// extended trace
this.neighboors[neuron.ID] = neuron;
var xtrace = this.trace.extended[neuron.ID] = {};
for (var id in this.connections.inputs) {
var input = this.connections.inputs[id];
xtrace[input.ID] = 0;
}
}
// keep track
if (neuron.ID in this.trace.influences) this.trace.influences[neuron.ID].push(connection);else this.trace.influences[neuron.ID] = [connection];
// set gater
connection.gater = this;
}
// returns true or false whether the neuron is self-connected or not
}, {
key: 'selfconnected',
value: function selfconnected() {
return this.selfconnection.weight !== 0;
}
// returns true or false whether the neuron is connected to another neuron (parameter)
}, {
key: 'connected',
value: function connected(neuron) {
var result = {
type: null,
connection: false
};
if (this == neuron) {
if (this.selfconnected()) {
result.type = 'selfconnection';
result.connection = this.selfconnection;
return result;
} else return false;
}
for (var type in this.connections) {
for (var connection in this.connections[type]) {
var connection = this.connections[type][connection];
if (connection.to == neuron) {
result.type = type;
result.connection = connection;
return result;
} else if (connection.from == neuron) {
result.type = type;
result.connection = connection;
return result;
}
}
}
return false;
}
// clears all the traces (the neuron forgets it's context, but the connections remain intact)
}, {
key: 'clear',
value: function clear() {
for (var trace in this.trace.elegibility) {
this.trace.elegibility[trace] = 0;
}
for (var trace in this.trace.extended) {
for (var extended in this.trace.extended[trace]) {
this.trace.extended[trace][extended] = 0;
}
}
this.error.responsibility = this.error.projected = this.error.gated = 0;
}
// all the connections are randomized and the traces are cleared
}, {
key: 'reset',
value: function reset() {
this.clear();
for (var type in this.connections) {
for (var connection in this.connections[type]) {
this.connections[type][connection].weight = Math.random() * .2 - .1;
}
}
this.bias = Math.random() * .2 - .1;
this.old = this.state = this.activation = 0;
}
// hardcodes the behaviour of the neuron into an optimized function
}, {
key: 'optimize',
value: function optimize(optimized, layer) {
optimized = optimized || {};
var store_activation = [];
var store_trace = [];
var store_propagation = [];
var varID = optimized.memory || 0;
var neurons = optimized.neurons || 1;
var inputs = optimized.inputs || [];
var targets = optimized.targets || [];
var outputs = optimized.outputs || [];
var variables = optimized.variables || {};
var activation_sentences = optimized.activation_sentences || [];
var trace_sentences = optimized.trace_sentences || [];
var propagation_sentences = optimized.propagation_sentences || [];
var layers = optimized.layers || { __count: 0, __neuron: 0 };
// allocate sentences
var allocate = function allocate(store) {
var allocated = layer in layers && store[layers.__count];
if (!allocated) {
layers.__count = store.push([]) - 1;
layers[layer] = layers.__count;
}
};
allocate(activation_sentences);
allocate(trace_sentences);
allocate(propagation_sentences);
var currentLayer = layers.__count;
// get/reserve space in memory by creating a unique ID for a variable