diff --git a/app/src/js/factory.js b/app/src/js/factory.js
index 10e4215..5234371 100644
--- a/app/src/js/factory.js
+++ b/app/src/js/factory.js
@@ -31,6 +31,14 @@ export const newNetwork = (layerSizes) => {
network = new anny.Network(layers)
}
+export const addLayer = () => {
+ network.addLayer(new anny.Layer(1))
+}
+
+export const addNeuron = () => {
+ _.sample(network.hiddenLayers).addNeuron()
+}
+
export const activate = (inputs) => {
network.activate(inputs || _.times(network.inputLayer.neurons.length, Math.random))
}
diff --git a/app/src/js/toolbar.js b/app/src/js/toolbar.js
index ebe64b1..7d1d421 100644
--- a/app/src/js/toolbar.js
+++ b/app/src/js/toolbar.js
@@ -48,6 +48,16 @@ export const newLogicNetwork = () => {
graph.update(factory.network)
}
+export const addLayer = () => {
+ factory.addLayer()
+ graph.update(factory.network)
+}
+
+export const addNeuron = () => {
+ factory.addNeuron()
+ graph.update(factory.network)
+}
+
export const newRandomNetwork = () => {
factory.newNetwork()
graph.update(factory.network)
diff --git a/gulp/tasks/anny.js b/gulp/tasks/anny.js
index 2cf3e4b..f9d9f7b 100644
--- a/gulp/tasks/anny.js
+++ b/gulp/tasks/anny.js
@@ -37,7 +37,7 @@ gulp.task('anny:build', (cb) => {
},
module: {
loaders: [
- { test: /\.js$/, loaders: ['babel', 'eslint'], include: [paths.annySrc] },
+ { test: /\.js$/, loaders: ['babel'], include: [paths.annySrc] },
],
},
}
diff --git a/gulp/tasks/app.js b/gulp/tasks/app.js
index 02290bd..0071b95 100644
--- a/gulp/tasks/app.js
+++ b/gulp/tasks/app.js
@@ -54,7 +54,7 @@ gulp.task('app:build:js', (cb) => {
},
module: {
loaders: [
- { test: /\.js$/, loaders: ['babel', 'eslint'], include: [paths.appSrc] },
+ { test: /\.js$/, loaders: ['babel'], include: [paths.appSrc] },
],
},
externals: {
diff --git a/gulp/tasks/watch.js b/gulp/tasks/watch.js
index 46c703d..85d24b3 100644
--- a/gulp/tasks/watch.js
+++ b/gulp/tasks/watch.js
@@ -4,15 +4,14 @@ const g = require('gulp-load-plugins')()
const gulp = g.help(require('gulp'), require('../gulphelp'))
gulp.task('watch', 'rebuild when files change', (cb) => {
- gulp.watch([
- // anny
- paths.annyEntry,
- `${paths.annySrc}/**/*`,
+ // anny
+ gulp.watch(`${paths.annySrc}/**/*`, ['anny:build'])
- // app
- `${paths.root}/index.html`,
+ // app
+ gulp.watch([
`${paths.appSrc}/**/*`,
- ], ['build'])
+ `${paths.root}/index.html`,
+ ], ['app:build'])
// docs
gulp.watch([
@@ -22,9 +21,7 @@ gulp.task('watch', 'rebuild when files change', (cb) => {
], ['docs'])
// docs less
- gulp.watch([
- `${paths.docsSrc}/**/*.less`,
- ], ['docs-less'])
+ gulp.watch(`${paths.docsSrc}/static/styles/**/*`, ['docs:styles'])
cb()
})
diff --git a/index.html b/index.html
index 5d0be98..196badf 100644
--- a/index.html
+++ b/index.html
@@ -38,6 +38,12 @@
rand
+
+ neuron
+
+
+ layer
+
diff --git a/package.json b/package.json
index 8c4bf0e..5a7587b 100644
--- a/package.json
+++ b/package.json
@@ -64,7 +64,6 @@
"eslint": "^3.12.2",
"eslint-config-airbnb": "^13.0.0",
"eslint-config-defaults": "^9.0.0",
- "eslint-loader": "^1.6.1",
"eslint-plugin-import": "^2.2.0",
"eslint-plugin-jsx-a11y": "^2.2.3",
"eslint-plugin-lodash": "^2.2.4",
diff --git a/src/Layer.js b/src/Layer.js
index 16b5dc5..2fcaa43 100644
--- a/src/Layer.js
+++ b/src/Layer.js
@@ -14,17 +14,18 @@ class Layer {
/**
* Creates a single dimension Layer of [Neurons]{@link Neuron}.
* @param {number} size - The number of Neurons this Layer should have.
- * @param {number} [learningRate] - The learning rate passed directly to the
- * Neuron constructor.
- * @param {object} [activation] - The activation function passed directly to
- * the
- * Neuron constructor.
+   * @param {object} [activation] - The activation function passed directly to the Neuron constructor.
+   * @param {number} [learningRate] - The learning rate passed directly to the Neuron constructor.
*/
constructor(size, activation, learningRate) {
if (!_.isNumber(size)) {
throw new Error(`Layer() 'size' must be a number, not: ${typeof size}`)
}
- this.neurons = _.times(size, () => new Neuron(activation, learningRate))
+ this.neurons = _.times(size, () => {
+ const neuron = new Neuron(activation, learningRate)
+ neuron.layer = this
+ return neuron
+ })
}
/**
@@ -60,6 +61,26 @@ class Layer {
return _.map(this.neurons, (neuron, i) => neuron.activate(values[i]))
}
+  /**
+   * Add a Neuron to this Layer, connecting it from every Neuron in the
+   * previous Layer (when one exists).
+   * @param {object} [activation] - The activation function passed directly to the Neuron constructor.
+   * @param {number} [learningRate] - The learning rate passed directly to the Neuron constructor.
+   */
+  addNeuron(activation, learningRate) {
+    const neuron = new Neuron(activation, learningRate)
+    neuron.layer = this // keep the back-reference consistent with the constructor
+    this.neurons.push(neuron)
+
+    // wire the new Neuron from every Neuron in the previous Layer, if any
+    const sourceLayer = _.get(this.neurons, '[0].connection.source.layer')
+    if (sourceLayer) {
+      _.forEach(sourceLayer.neurons, (source) => {
+        source.connect(neuron, INITIALIZE.weight(sourceLayer.neurons.length))
+      })
+    }
+  }
+
/**
* Sets all the Neuron `delta`s in this Layer to the given array of values.
* @param {number[]} [deltas=[]] - Delta values, one for each Neuron.
diff --git a/src/Network.js b/src/Network.js
index e45c8c7..16536fa 100644
--- a/src/Network.js
+++ b/src/Network.js
@@ -100,6 +100,19 @@ class Network {
return this.output = this.outputLayer.activate()
}
+ /**
+ * Add a layer to the output of the Network.
+ * @param {Layer} layer - The layer to add.
+ */
+  addLayer(layer) {
+    // connect the current output Layer into the new Layer, which then
+    // becomes the Network's output Layer
+    this.outputLayer.connect(layer)
+    this.outputLayer = layer
+    // track the new Layer so activation/backprop traverse it
+    this.allLayers = [...this.allLayers, layer]
+  }
+
/**
* Set Network `error` and output Layer `delta`s and propagate them backward
* through the Network. The input Layer has no use for deltas, so it is skipped.