Tiny RNN

This is the simplest possible Recurrent Neural Network example, written by following this tutorial: https://dev.to/liashchynskyi/creating-of-neural-network-using-javascript-in-7minutes-o21
const { exp, random, multiply, dotMultiply, mean, abs, subtract, transpose, add, matrix } = require('mathjs')
First, the activation function, which is going to be the sigmoid (a function I've heard is pretty jurassic as machine learning algorithms go).
/* old hat activation function */
const sigmoid = (x, derivative) => {
  /* when derivative is true, x is expected to already be a sigmoid output,
     so the slope is simply x * (1 - x) */
  if (derivative) return x * (1 - x)
  return 1 / (1 + exp(-x))
}
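A quick sanity check, just to see the function behave (this bit is mine, not from the tutorial): the sigmoid of 0 is 0.5, it saturates towards 1 for large inputs, and the derivative form peaks at 0.25 for an activation of 0.5.

console.log(sigmoid(0, false))   /* 0.5 */
console.log(sigmoid(2, false))   /* about 0.88, creeping towards 1 */
console.log(sigmoid(0.5, true))  /* 0.25, the steepest point of the curve */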
The tutorial decided to call this class NeuralNetwork, but I have the feeling NeuralLayer would be more appropriate. But I might be wrong.
/* object orientation 🤢 based neural net */
class NeuralNetwork {
  constructor(...args) {
    [
      this.input_nodes,  /* input neurons */
      this.hidden_nodes, /* hidden neurons */
      this.output_nodes  /* output neurons */
    ] = args
    this.epochs = 50000
    this.activation = sigmoid
    this.lr = .5 /* learning rate */
    this.output = 0

    /* from input to hidden layer */
    this.synapse0 = random([this.input_nodes, this.hidden_nodes], -1.0, 1.0)
    /* from hidden layer to output */
    this.synapse1 = random([this.hidden_nodes, this.output_nodes], -1.0, 1.0)
  }

  setEpochs(numEpochs) {
    this.epochs = numEpochs
  }

  setLearningRate(lr) {
    this.lr = lr
  }

  train(input, target) {
    for (let i = 0; i < this.epochs; i++) {
      /* forward propagation */
      let input_layer = input
      let hidden_layer = multiply(input_layer, this.synapse0).map(v => this.activation(v, false))
      let output_layer = multiply(hidden_layer, this.synapse1).map(v => this.activation(v, false))

      /* backward propagation */
      let output_error = subtract(target, output_layer)
      let output_delta = dotMultiply(output_error, output_layer.map(v => this.activation(v, true)))
      let hidden_error = multiply(output_delta, transpose(this.synapse1))
      let hidden_delta = dotMultiply(hidden_error, hidden_layer.map(v => this.activation(v, true)))

      /* gradient descent */
      this.synapse1 = add(this.synapse1, multiply(transpose(hidden_layer), multiply(output_delta, this.lr)))
      this.synapse0 = add(this.synapse0, multiply(transpose(input_layer), multiply(hidden_delta, this.lr)))
      this.output = () => output_layer;

      (i % 10000 === 0) && console.log(`Error: ${ mean(abs(output_error)) }`)
    }
  }

  predict(input) {
    let input_layer = input
    let hidden_layer = multiply(input_layer, this.synapse0).map(v => this.activation(v, false))
    let output_layer = multiply(hidden_layer, this.synapse1).map(v => this.activation(v, false))
    return output_layer
  }
}
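Before training you can tweak the hyperparameters through setEpochs and setLearningRate. A tiny sketch (the numbers here are guesses of mine, not values from the tutorial):

const tweaked = new NeuralNetwork(2, 4, 1) /* hypothetical instance, just for illustration */
tweaked.setEpochs(20000)      /* fewer iterations: faster, but the error stays higher */
tweaked.setLearningRate(0.1)  /* smaller gradient steps */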
That class was a long block, but all the magic is in that one object (magic = math). The final block simply exercises the NeuralNetwork class on the XOR truth table.
const input = matrix([
  [0, 0],
  [0, 1],
  [1, 0],
  [1, 1]
])
const target = matrix([
  [0],
  [1],
  [1],
  [0]
])

const nn = new NeuralNetwork(2, 4, 1)
nn.train(input, target)
console.log(nn.predict(input))
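predict returns a mathjs matrix of values between 0 and 1, not clean XOR bits. If you want binary answers, one way (my addition, not part of the tutorial) is to round each entry:

const rounded = nn.predict(input).map(v => Math.round(v))
console.log(rounded) /* should come out close to [[0], [1], [1], [0]] after training */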