From 9d6f0dd0123e416ecc76838c80c4606c08d77343 Mon Sep 17 00:00:00 2001
From: Edward
Date: Thu, 11 Nov 2021 11:36:34 -0500
Subject: [PATCH 1/4] Create p006-Softmax-Activation.js

---
 Javascript/p006-Softmax-Activation.js | 99 +++++++++++++++++++++++++++
 1 file changed, 99 insertions(+)
 create mode 100644 Javascript/p006-Softmax-Activation.js

diff --git a/Javascript/p006-Softmax-Activation.js b/Javascript/p006-Softmax-Activation.js
new file mode 100644
index 0000000..b16e4d8
--- /dev/null
+++ b/Javascript/p006-Softmax-Activation.js
@@ -0,0 +1,99 @@
+/* This is a JavaScript implementation of the Neural Networks from Scratch in Python series.
+*
+* The part 6 bits, i.e. the Softmax activation class, are declared and defined below.
+*
+* Link to the series on youtube: https://www.youtube.com/watch?v=Wo5dMEP_BbI&list=PLQVvvaa0QuDcjD5BAw2DxE6OF2tius3V3
+*/
+
+const math = require("mathjs");
+
+// Moved this code from spiral-data.js written by @vancegillies
+// Updated by @daniel-kukiela
+function spiral_data(points, classes) {
+    // Using MathJs functions to make matrices with zeros, but converting to arrays for simplicity
+    const X = math.zeros(points * classes, 2).toArray();
+    const y = math.zeros(points * classes, "dense").toArray();
+    let ix = 0;
+    for (let class_number = 0; class_number < classes; class_number++) {
+        let r = 0;
+        let t = class_number * 4;
+
+        while (r <= 1 && t <= (class_number + 1) * 4) {
+            // adding some randomness to t
+            const random_t = t + math.random(points) * 0.008;
+            // Was `* 0.2` but reduced so you can somewhat see the arms of the spiral in the visualization
+            // Feel free to change it back
+
+            // converting from polar to cartesian coordinates
+            X[ix][0] = r * math.sin(random_t * 2.5);
+            X[ix][1] = r * math.cos(random_t * 2.5);
+            y[ix] = class_number;
+
+            // the below two statements achieve linspace-like functionality
+            r += 1.0 / (points - 1);
+            t += 4.0 / (points - 1);
+
+            ix++; // increment index
+        }
+    }
+    // Returning as MathJs matrices; could be arrays, doesn't really matter
+    return [math.matrix(X), math.matrix(y)];
+}
+
+let [X, y] = spiral_data(100, 3);
+
+
+
+
+
+// no randn equivalent in JS, so a Box-Muller transform is necessary to pull appropriate values from a normal distribution
+// https://stackoverflow.com/questions/25582882/javascript-math-random-normal-distribution-gaussian-bell-curve
+// Standard Normal variate using Box-Muller transform.
+function randn_bm(n_inputs, n_neurons) {
+    var u = n_inputs;
+    var v = n_neurons;
+    return math.sqrt( -2.0 * math.log( u ) ) * math.cos( 2.0 * math.PI * v );
+}
+
+class Layer_Dense {
+    constructor (n_inputs, n_neurons){
+        this.weights = 0.1 * randn_bm(n_inputs, n_neurons);
+        this.biases = math.zeros(1, n_neurons);
+    }
+
+    forward (inputs) {
+        var biasesmat = this.biases;
+        // Since only adding matrices elementwise is supported, you need to make the biases into a matrix and not a vector.
+        for (var i=0; i<inputs._data.length-1; i++) { biasesmat = math.concat(biasesmat, this.biases, 0); }
+        this.output = math.add(math.multiply(inputs, this.weights), biasesmat);
+    }
+}
+
+class Activation_ReLU {
+    forward (inputs) {
+        this.output = math.max(0, inputs);
+    }
+}
+
+class Activation_Softmax {
+    forward (inputs) {
+        let exp_values = math.exp(inputs - math.max(inputs, axis=1, keepdims=True));
+        let probabilities = exp_values / math.sum(exp_values, axis=1, keepdims=True);
+        this.output = probabilities;
+    }
+}
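/*
 * Reviewer sketch, not part of the patch above: randn_bm feeds the layer
 * dimensions in as the two uniform samples, so for n_inputs > 1 the term
 * -2 * log(u) goes negative, math.sqrt returns a complex value, and
 * `0.1 * randn_bm(...)` coerces to NaN instead of an n_inputs x n_neurons
 * weight matrix. A minimal matrix-valued Box-Muller randn could look like
 * the following, assuming mathjs is in scope as in the file above; patch 2
 * below sidesteps the problem by switching to math.random entirely.
 */
function randn(n_inputs, n_neurons) {
    const rows = [];
    for (let i = 0; i < n_inputs; i++) {
        const row = [];
        for (let j = 0; j < n_neurons; j++) {
            const u = 1 - Math.random(); // in (0, 1], keeps log(u) finite
            const v = Math.random();
            row.push(Math.sqrt(-2.0 * Math.log(u)) * Math.cos(2.0 * Math.PI * v));
        }
        rows.push(row);
    }
    return math.matrix(rows);
}
// usage sketch: this.weights = math.multiply(0.1, randn(n_inputs, n_neurons));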
From: Edward
Date: Thu, 11 Nov 2021 22:47:37 -0500
Subject: [PATCH 2/4] p0005 resolved

- Layer_Dense function set to (2,5), resolved shape issue
- Still working on a solution for max along axis with keepdims
---
 Javascript/p005-ReLU-Activation.js    | 10 +++++-----
 Javascript/p006-Softmax-Activation.js | 23 +++++------------------
 2 files changed, 10 insertions(+), 23 deletions(-)

diff --git a/Javascript/p005-ReLU-Activation.js b/Javascript/p005-ReLU-Activation.js
index 8a2b87c..6aad1d5 100644
--- a/Javascript/p005-ReLU-Activation.js
+++ b/Javascript/p005-ReLU-Activation.js
@@ -3,7 +3,7 @@ Creates a dense layer of neurons with a ReLU activation function, and feeds forw
 Associated YT tutorial: https://www.youtu.be/gmjzbpSVY1A
 */
 
-const math = require("mathjs");
+//const math = require("mathjs");
 
 // Moved this code from spiral-data.js written by @vancegillies
 // Updated by @daniel-kukiela
@@ -47,7 +47,7 @@ class Layer_Dense {
         this.weights = math.random([n_inputs, n_neurons], -1.0, 1.0);
         this.biases = math.zeros(1, n_neurons);
     }
-    
+
     forward (inputs) {
         var biasesmat = this.biases;
         // Since only adding matrices elementwise is supported, you need to make the biases into a matrix and not a vector.
@@ -58,16 +58,16 @@ class Layer_Dense {
 
 class Activation_ReLU {
     constructor () {}
-    
+
     forward (inputs) {
         this.output = math.matrix(inputs._data.map(layer => layer.map(i => i<0?0:i)));
     }
 }
 
-var layer1 = new Layer_Dense(4, 5);
+var layer1 = new Layer_Dense(2, 5);
 var activation1 = new Activation_ReLU();
 
 layer1.forward(X);
 //console.log(layer1.output);
 activation1.forward(layer1.output);
-console.log(activation1.output);
\ No newline at end of file
+console.log(activation1.output);

diff --git a/Javascript/p006-Softmax-Activation.js b/Javascript/p006-Softmax-Activation.js
index b16e4d8..9521672 100644
--- a/Javascript/p006-Softmax-Activation.js
+++ b/Javascript/p006-Softmax-Activation.js
@@ -5,7 +5,7 @@
 * Link to the series on youtube: https://www.youtube.com/watch?v=Wo5dMEP_BbI&list=PLQVvvaa0QuDcjD5BAw2DxE6OF2tius3V3
 */
 
-const math = require("mathjs");
+// const math = require("mathjs");
 
 // Moved this code from spiral-data.js written by @vancegillies
 // Updated by @daniel-kukiela
@@ -42,22 +42,9 @@ function spiral_data(points, classes) {
 
 let [X, y] = spiral_data(100, 3);
 
-
-
-
-
-// no randn equivalent in JS, so a Box-Muller transform is necessary to pull appropriate values from a normal distribution
-// https://stackoverflow.com/questions/25582882/javascript-math-random-normal-distribution-gaussian-bell-curve
-// Standard Normal variate using Box-Muller transform.
-function randn_bm(n_inputs, n_neurons) {
-    var u = n_inputs;
-    var v = n_neurons;
-    return math.sqrt( -2.0 * math.log( u ) ) * math.cos( 2.0 * math.PI * v );
-}
-
 class Layer_Dense {
     constructor (n_inputs, n_neurons){
-        this.weights = 0.1 * randn_bm(n_inputs, n_neurons);
+        this.weights = math.random([n_inputs, n_neurons], -1.0, 1.0);
         this.biases = math.zeros(1, n_neurons);
     }
 
@@ -71,14 +58,14 @@ class Layer_Dense {
 
 class Activation_ReLU {
     forward (inputs) {
-        this.output = math.max(0, inputs);
+        this.output = math.matrix(inputs._data.map(layer => layer.map(i => i<0?0:i)));
    }
 }
 
 class Activation_Softmax {
     forward (inputs) {
-        let exp_values = math.exp(inputs - math.max(inputs, axis=1, keepdims=True));
-        let probabilities = exp_values / math.sum(exp_values, axis=1, keepdims=True);
+        let exp_values = math.exp(inputs - math.max.apply(null, inputs));
+        let probabilities = exp_values / math.sum(exp_values);
         this.output = probabilities;
     }
 }
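/*
 * Reviewer sketch, not part of the patch above: the commit message leaves
 * "max along axis with keepdims" unresolved, and math.max.apply(null, inputs)
 * from this patch only works on a flat array. For a batch, softmax has to
 * find a max and a normalizer per row, which is what NumPy's axis=1,
 * keepdims=True provides. A minimal row-wise, numerically stable version
 * could look like this, assuming mathjs is in scope and the input is a
 * mathjs matrix such as layer1.output (or a plain nested array).
 */
function softmax_rows(inputs) {
    const rows = math.matrix(inputs).toArray(); // accepts a mathjs matrix or a nested array
    return math.matrix(rows.map((row) => {
        const row_max = Math.max(...row);             // per-row max, subtracted for numerical stability
        const exps = row.map((v) => Math.exp(v - row_max));
        const norm = exps.reduce((a, b) => a + b, 0); // per-row normalizer, like sum(..., keepdims=True)
        return exps.map((e) => e / norm);
    }));
}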
From 846db03408313167f4cb9f9792e198cf60767901 Mon Sep 17 00:00:00 2001
From: Edward
Date: Sun, 14 Nov 2021 12:36:53 -0500
Subject: [PATCH 3/4] Update p006-Softmax-Activation.js

---
 Javascript/p006-Softmax-Activation.js | 23 ++++++++++++++++++++---
 1 file changed, 20 insertions(+), 3 deletions(-)

diff --git a/Javascript/p006-Softmax-Activation.js b/Javascript/p006-Softmax-Activation.js
index 9521672..ea6fd88 100644
--- a/Javascript/p006-Softmax-Activation.js
+++ b/Javascript/p006-Softmax-Activation.js
@@ -64,9 +64,26 @@ class Activation_ReLU {
 
 class Activation_Softmax {
     forward (inputs) {
-        let exp_values = math.exp(inputs - math.max.apply(null, inputs));
-        let probabilities = exp_values / math.sum(exp_values);
-        this.output = probabilities;
+        let exp_values = new Array;
+
+        inputs.forEach ((input) => {
+            if (Array.isArray(input)) {
+                input.forEach ((element) => {
+                    exp_values.push(math.exp(element));
+                });
+            } else {
+                exp_values.push(math.exp(input));
+            }
+        });
+
+        let norm_base = math.sum(exp_values);
+        let norm_values = new Array;
+
+        exp_values.forEach ((element) => {
+            norm_values.push(element / norm_base);
+        });
+
+        this.output = norm_values;
     }
 }

From c9a9bf052fba3939db1ab075039ce8737aca0d93 Mon Sep 17 00:00:00 2001
From: Edward
Date: Sun, 14 Nov 2021 20:02:00 -0500
Subject: [PATCH 4/4] Create p007-Categorical-Cross-Entropy-Loss.js

---
 Javascript/p007-Categorical-Cross-Entropy-Loss.js | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
 create mode 100644 Javascript/p007-Categorical-Cross-Entropy-Loss.js

diff --git a/Javascript/p007-Categorical-Cross-Entropy-Loss.js b/Javascript/p007-Categorical-Cross-Entropy-Loss.js
new file mode 100644
index 0000000..0437fcf
--- /dev/null
+++ b/Javascript/p007-Categorical-Cross-Entropy-Loss.js
@@ -0,0 +1,19 @@
+/*
+* Calculating the loss with Categorical Cross-Entropy
+* Associated with YT NNFS tutorial: https://www.youtube.com/watch?v=dEXPMQXoiLc
+*/
+
+const math = require("mathjs");
+
+const softmax_output = [0.7, 0.1, 0.2];
+const target_output = [1, 0, 0];
+
+// one-hot target: only the term for the correct class survives the sum
+const loss = -(math.log(softmax_output[0]) * target_output[0] +
+               math.log(softmax_output[1]) * target_output[1] +
+               math.log(softmax_output[2]) * target_output[2]);
+
+console.log(loss);
+
+console.log(-math.log(0.7));
+console.log(-math.log(0.5));
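/*
 * Reviewer sketch, not part of the patch above: p007 hard-codes a single
 * sample with a one-hot target. The same categorical cross-entropy over a
 * batch with sparse (class-index) targets could look like the following;
 * the function name and the 1e-7 clip (which keeps math.log away from
 * log(0)) are assumptions carried over from the Python series, not from
 * this patch.
 */
const math = require("mathjs");

function categorical_cross_entropy(softmax_outputs, class_targets) {
    const sample_losses = softmax_outputs.map((row, i) => {
        // confidence predicted for the correct class, clipped into [1e-7, 1 - 1e-7]
        const confidence = Math.min(Math.max(row[class_targets[i]], 1e-7), 1 - 1e-7);
        return -Math.log(confidence);
    });
    return math.mean(sample_losses); // average loss over the batch
}

// e.g. categorical_cross_entropy([[0.7, 0.1, 0.2], [0.1, 0.5, 0.4]], [0, 1]) is roughly 0.525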