Add layer and neuron #98

Open · wants to merge 2 commits into master

8 changes: 8 additions & 0 deletions app/src/js/factory.js
@@ -31,6 +31,14 @@ export const newNetwork = (layerSizes) => {
   network = new anny.Network(layers)
 }
 
+export const addLayer = () => {
+  network.addLayer(new anny.Layer(1))
+}
+
+export const addNeuron = () => {
+  _.sample(network.hiddenLayers).addNeuron()
+}
+
 export const activate = (inputs) => {
   network.activate(inputs || _.times(network.inputLayer.neurons.length, Math.random))
 }
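
One caveat with `addNeuron` above: `_.sample` returns `undefined` when `network.hiddenLayers` is empty, so the call would throw on a network with no hidden layers. A defensive sketch of the same helper inside app/src/js/factory.js (assuming `hiddenLayers` is a possibly-empty array of Layer instances, as the code implies):

```js
// Sketch only: guard the sampled layer before dereferencing it.
export const addNeuron = () => {
  const layer = _.sample(network.hiddenLayers)
  if (!layer) return // nothing to grow in a plain input -> output network
  layer.addNeuron()
}
```
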
10 changes: 10 additions & 0 deletions app/src/js/toolbar.js
@@ -48,6 +48,16 @@ export const newLogicNetwork = () => {
   graph.update(factory.network)
 }
 
+export const addLayer = () => {
+  factory.addLayer()
+  graph.update(factory.network)
+}
+
+export const addNeuron = () => {
+  factory.addNeuron()
+  graph.update(factory.network)
+}
+
 export const newRandomNetwork = () => {
   factory.newNetwork()
   graph.update(factory.network)
2 changes: 1 addition & 1 deletion gulp/tasks/anny.js
@@ -37,7 +37,7 @@ gulp.task('anny:build', (cb) => {
   },
   module: {
     loaders: [
-      { test: /\.js$/, loaders: ['babel', 'eslint'], include: [paths.annySrc] },
+      { test: /\.js$/, loaders: ['babel'], include: [paths.annySrc] },
     ],
   },
 }
2 changes: 1 addition & 1 deletion gulp/tasks/app.js
@@ -54,7 +54,7 @@ gulp.task('app:build:js', (cb) => {
   },
   module: {
     loaders: [
-      { test: /\.js$/, loaders: ['babel', 'eslint'], include: [paths.appSrc] },
+      { test: /\.js$/, loaders: ['babel'], include: [paths.appSrc] },
     ],
   },
   externals: {
17 changes: 7 additions & 10 deletions gulp/tasks/watch.js
@@ -4,15 +4,14 @@ const g = require('gulp-load-plugins')()
 const gulp = g.help(require('gulp'), require('../gulphelp'))
 
 gulp.task('watch', 'rebuild when files change', (cb) => {
-  gulp.watch([
-    // anny
-    paths.annyEntry,
-    `${paths.annySrc}/**/*`,
+  // anny
+  gulp.watch(`${paths.annySrc}/**/*`, ['anny:build'])
 
-    // app
-    `${paths.root}/index.html`,
+  // app
+  gulp.watch([
     `${paths.appSrc}/**/*`,
-  ], ['build'])
+    `${paths.root}/index.html`,
+  ], ['app:build'])
 
   // docs
   gulp.watch([
@@ -22,9 +21,7 @@ gulp.task('watch', 'rebuild when files change', (cb) => {
   ], ['docs'])
 
   // docs less
-  gulp.watch([
-    `${paths.docsSrc}/**/*.less`,
-  ], ['docs-less'])
+  gulp.watch(`${paths.docsSrc}/static/styles/**/*`, ['docs:styles'])
 
   cb()
 })
6 changes: 6 additions & 0 deletions index.html
@@ -38,6 +38,12 @@
       <div class="item" onclick="annyApp.toolbar.newRandomNetwork()">
         <i class="icon ion-ios-shuffle"></i>rand
       </div>
+      <div class="item" onclick="annyApp.toolbar.addNeuron()">
+        <i class="icon ion-ios-plus-outline"></i>neuron
+      </div>
+      <div class="item" onclick="annyApp.toolbar.addLayer()">
+        <i class="icon ion-ios-plus-outline"></i>layer
+      </div>
 
       <div class="divider"></div>
 
1 change: 0 additions & 1 deletion package.json
@@ -64,7 +64,6 @@
     "eslint": "^3.12.2",
     "eslint-config-airbnb": "^13.0.0",
     "eslint-config-defaults": "^9.0.0",
-    "eslint-loader": "^1.6.1",
     "eslint-plugin-import": "^2.2.0",
     "eslint-plugin-jsx-a11y": "^2.2.3",
     "eslint-plugin-lodash": "^2.2.4",
33 changes: 27 additions & 6 deletions src/Layer.js
@@ -14,17 +14,18 @@ class Layer {
   /**
    * Creates a single dimension Layer of [Neurons]{@link Neuron}.
    * @param {number} size - The number of Neurons this Layer should have.
-   * @param {number} [learningRate] - The learning rate passed directly to the
-   * Neuron constructor.
-   * @param {object} [activation] - The activation function passed directly to
-   * the
-   * Neuron constructor.
+   * @param {object} [activation] - The activation function passed directly to the Neuron constructor.
+   * @param {number} [learningRate] - The learning rate passed directly to the Neuron constructor.
    */
   constructor(size, activation, learningRate) {
     if (!_.isNumber(size)) {
       throw new Error(`Layer() 'size' must be a number, not: ${typeof size}`)
     }
-    this.neurons = _.times(size, () => new Neuron(activation, learningRate))
+    this.neurons = _.times(size, () => {
+      const neuron = new Neuron(activation, learningRate)
+      neuron.layer = this
+      return neuron
+    })
   }
 
   /**
@@ -60,6 +61,26 @@
     return _.map(this.neurons, (neuron, i) => neuron.activate(values[i]))
   }
 
+  /**
+   * Add a Neuron to this Layer.
+   * @param {object} [activation] - The activation function passed directly to the Neuron constructor.
+   * @param {number} [learningRate] - The learning rate passed directly to the Neuron constructor.
+   */
+  addNeuron(activation, learningRate) {
+    const neuron = new Neuron(activation, learningRate)
+    neuron.layer = this // keep the back-reference consistent with the constructor
+    this.neurons.push(neuron)
+
+    // connect every neuron in the previous layer to the new neuron
+    const sourceLayer = _.get(this.neurons, '[0].connection.source.layer')
+    if (sourceLayer) {
+      _.forEach(sourceLayer.neurons, (source) => {
+        source.connect(neuron, INITIALIZE.weight(sourceLayer.neurons.length))
+      })
+    }
+  }
+
   /**
    * Sets all the Neuron `delta`s in this Layer to the given array of values.
    * @param {number[]} [deltas=[]] - Delta values, one for each Neuron.
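
For reference, a minimal sketch of how `addNeuron` would be exercised, using only the API visible in this diff (`anny.Layer`, `anny.Network`, `allLayers`, `neurons`); illustrative, not canonical:

```js
// Sketch: grow a hidden layer at runtime.
const layers = [new anny.Layer(2), new anny.Layer(3), new anny.Layer(1)]
const network = new anny.Network(layers)

const hidden = network.allLayers[1]
hidden.addNeuron() // default activation and learning rate

// The new neuron is pushed onto the layer and connected from every
// neuron in the previous layer with a freshly initialized weight.
console.log(hidden.neurons.length) // 4
```
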
13 changes: 13 additions & 0 deletions src/Network.js
@@ -100,6 +100,19 @@ class Network {
     return this.output = this.outputLayer.activate()
   }
 
+  /**
+   * Add a Layer to the output of the Network.
+   * @param {Layer} layer - The Layer to add.
+   */
+  addLayer(layer) {
+    this.outputLayer.connect(layer)
+    this.outputLayer = layer
+    this.allLayers = [...this.allLayers, layer]
+  }
+
   /**
    * Set Network `error` and output Layer `delta`s and propagate them backward
    * through the Network. The input Layer has no use for deltas, so it is skipped.
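
Continuing the sketch above: because `addLayer` connects the new Layer after the current output Layer, the old output Layer becomes a hidden Layer. Illustrative only, same assumptions:

```js
// Sketch: appending a layer re-points outputLayer and extends allLayers.
const previousOutput = network.outputLayer
network.addLayer(new anny.Layer(1))

console.log(network.outputLayer === previousOutput) // false: the new Layer is now the output
console.log(network.allLayers.length)               // 4: grew by one
```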