Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 78d34b3

Browse files
committed Jul 5, 2016
Starts on #11, though tests not working yet
1 parent d2cd6a4 commit 78d34b3

27 files changed

+3296
-674
lines changed
 

‎.idea/jsLibraryMappings.xml

+6
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

‎index.html

+2,027
Large diffs are not rendered by default.

‎lib/brain.js

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
// Public entry point: re-export the library's main pieces.
exports.NeuralNetwork = require("./neural-network").NeuralNetwork;
// NOTE(review): cross-validate and likely are being converted to ES
// `export default` in this commit; depending on the build interop these
// requires may resolve to a module-namespace object instead of the
// function itself — confirm (`.default` may be needed).
exports.crossValidate = require("./cross-validate");
exports.likely = require("./likely");

‎lib/cross-validate.js

+54-62
Original file line numberDiff line numberDiff line change
@@ -1,76 +1,68 @@
1-
var _ = require("underscore")._;
1+
import { _ } from'underscore';
22

/**
 * Train a fresh classifier on trainSet, then evaluate it on testSet,
 * timing both phases.
 *
 * @param classifierConst constructor for the classifier under test
 * @param opts            constructor options
 * @param trainOpts       options passed to classifier.train()
 * @param trainSet        training examples
 * @param testSet         held-out examples
 * @returns the classifier's test stats extended with timing, iteration
 *          count, train error, learning rate, hidden sizes and the
 *          serialized network
 */
function testPartition(classifierConst, opts, trainOpts, trainSet, testSet) {
  const classifier = new classifierConst(opts);

  const beginTrain = Date.now();
  const trainingStats = classifier.train(trainSet, trainOpts);

  const beginTest = Date.now();
  const testStats = classifier.test(testSet);
  const endTest = Date.now();

  return _(testStats).extend({
    trainTime : beginTest - beginTrain,
    testTime : endTest - beginTest,
    iterations: trainingStats.iterations,
    trainError: trainingStats.error,
    learningRate: trainOpts.learningRate,
    hidden: classifier.hiddenSizes,
    network: classifier.toJSON()
  });
}
2823

29-
module.exports = function crossValidate(classifierConst, data, opts, trainOpts, k) {
30-
k = k || 4;
31-
var size = data.length / k;
24+
export default function crossValidate(classifierConst, data, opts, trainOpts, k = 4) {
25+
let size = data.length / k;
3226

3327
data = _(data).sortBy(function() {
3428
return Math.random();
3529
});
3630

37-
var avgs = {
38-
error : 0,
39-
trainTime : 0,
40-
testTime : 0,
41-
iterations: 0,
42-
trainError: 0
43-
};
44-
45-
var stats = {
46-
truePos: 0,
47-
trueNeg: 0,
48-
falsePos: 0,
49-
falseNeg: 0,
50-
total: 0
51-
};
52-
53-
var misclasses = [];
54-
55-
var results = _.range(k).map(function(i) {
56-
var dclone = _(data).clone();
57-
var testSet = dclone.splice(i * size, size);
58-
var trainSet = dclone;
59-
60-
var result = testPartition(classifierConst, opts, trainOpts, trainSet, testSet);
61-
62-
_(avgs).each(function(sum, stat) {
63-
avgs[stat] = sum + result[stat];
64-
});
65-
66-
_(stats).each(function(sum, stat) {
67-
stats[stat] = sum + result[stat];
68-
})
69-
70-
misclasses.push(result.misclasses);
71-
72-
return result;
73-
});
31+
let avgs = {
32+
error : 0,
33+
trainTime : 0,
34+
testTime : 0,
35+
iterations: 0,
36+
trainError: 0
37+
}
38+
, stats = {
39+
truePos: 0,
40+
trueNeg: 0,
41+
falsePos: 0,
42+
falseNeg: 0,
43+
total: 0
44+
}
45+
, misclasses = []
46+
, results = _.range(k).map(function(i) {
47+
let dclone = _(data).clone()
48+
, testSet = dclone.splice(i * size, size)
49+
, trainSet = dclone
50+
, result = testPartition(classifierConst, opts, trainOpts, trainSet, testSet)
51+
;
52+
53+
_(avgs).each(function(sum, stat) {
54+
avgs[stat] = sum + result[stat];
55+
});
56+
57+
_(stats).each(function(sum, stat) {
58+
stats[stat] = sum + result[stat];
59+
});
60+
61+
misclasses.push(result.misclasses);
62+
63+
return result;
64+
})
65+
;
7466

7567
_(avgs).each(function(sum, i) {
7668
avgs[i] = sum / k;

‎lib/likely.js

+9-6
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
1-
module.exports = function (input, net) {
2-
var output = net.run(input);
3-
var maxProp = null;
4-
var maxValue = -1;
5-
for (var prop in output) {
1+
export default function (input, net) {
2+
let output = net.run(input)
3+
, maxProp = null
4+
, maxValue = -1
5+
;
6+
7+
for (let prop in output) {
68
if (output.hasOwnProperty(prop)) {
79
var value = output[prop];
810
if (value > maxValue) {
@@ -11,5 +13,6 @@ module.exports = function (input, net) {
1113
}
1214
}
1315
}
16+
1417
return maxProp;
15-
};
18+
};

‎lib/lookup.js

+25-28
Original file line numberDiff line numberDiff line change
@@ -1,58 +1,55 @@
1-
var _ = require("underscore");
1+
import _ from 'underscore';
22

33
/* Functions for turning sparse hashes into arrays and vice versa */
/**
 * Build a lookup table covering every key that appears in any of the
 * given sparse hashes, e.g. [{a: 1}, {b: 6, c: 7}] -> {a: 0, b: 1, c: 2}.
 */
export function buildLookup(hashes) {
  const merged = _(hashes).reduce((memo, hash) => _(memo).extend(hash), {});
  return lookupFromHash(merged);
}
1211

/**
 * Number the keys of a hash in iteration order,
 * e.g. {a: 6, b: 7} -> {a: 0, b: 1}.
 */
export function lookupFromHash(hash) {
  const lookup = {};
  let index = 0;

  for (const key in hash) {
    lookup[key] = index;
    index += 1;
  }

  return lookup;
}
2224

/**
 * Flatten a sparse hash into a dense array using lookup positions;
 * keys missing from the hash become 0.
 * e.g. {a: 0, b: 1}, {a: 6} -> [6, 0]
 */
export function toArray(lookup, hash) {
  const array = [];
  for (const key in lookup) {
    const value = hash[key];
    array[lookup[key]] = value ? value : 0;
  }
  return array;
}
3133

/**
 * Inverse of toArray: rebuild a hash from a dense array using lookup
 * positions. e.g. {a: 0, b: 1}, [6, 7] -> {a: 6, b: 7}
 */
export function toHash(lookup, array) {
  const hash = {};
  for (const key in lookup) {
    hash[key] = array[lookup[key]];
  }
  return hash;
}
4042

/**
 * Build a lookup from an array of keys. The array is walked from the
 * END, so the LAST element gets index 0, e.g. ['a','b'] -> {b: 0, a: 1}.
 */
export function lookupFromArray(array) {
  const lookup = {};
  let nextIndex = 0;

  for (let pos = array.length - 1; pos >= 0; pos--) {
    lookup[array[pos]] = nextIndex;
    nextIndex += 1;
  }

  return lookup;
}
51-
52-
module.exports = {
53-
buildLookup: buildLookup,
54-
lookupFromHash: lookupFromHash,
55-
toArray: toArray,
56-
toHash: toHash,
57-
lookupFromArray: lookupFromArray
58-
};

‎lib/neural-network.js

+429
Large diffs are not rendered by default.

‎lib/neuralnetwork.js

-577
This file was deleted.

‎lib/recurrent/index.js

+387
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,387 @@
1+
var Matrix = require('./matrix'),
2+
RandomMatrix = require('./matrix/random'),
3+
add = require('./matrix/add'),
4+
multiply = require('./matrix/multiply'),
5+
multiplyElement = require('./matrix/multiply-element');
6+
7+
// Transformer definitions

/**
 * Computation graph: applies matrix ops in a forward pass and, when
 * needs_backprop is true, records one closure per op so gradients can
 * be replayed in reverse order by backward().
 * @param {boolean} [needs_backprop=true] record backward closures?
 */
function Graph(needs_backprop) {
  if(typeof needs_backprop === 'undefined') { needs_backprop = true; }
  this.needs_backprop = needs_backprop;

  // this will store a list of functions that perform backprop,
  // in their forward pass order. So in backprop we will go
  // backwards and evoke each one
  this.backprop = [];
}
Graph.prototype = {
  backward: function() {
    // Run every recorded closure, newest first.
    // BUG FIX: was `length > 1`, which left the first recorded op
    // (the earliest forward step) unexecuted.
    while(this.backprop.length > 0) {
      this.backprop.pop()(); // tick!
    }
  },
  rowPluck: function(m, ix) {
    // pluck a row of m with index ix and return it as col vector
    // BUG FIX: was `assert(...)`, which is not defined in this module
    // and threw a ReferenceError instead of a meaningful error
    if (ix < 0 || ix >= m.n) {
      throw new Error('rowPluck: row index out of bounds');
    }
    var d = m.d;
    var out = new Matrix(d, 1);
    for(var i=0,n=d;i<n;i++){ out.w[i] = m.w[d * ix + i]; } // copy over the data

    if(this.needs_backprop) {
      this.backprop.push(function backward() {
        for(var i=0,n=d;i<n;i++){ m.dw[d * ix + i] += out.dw[i]; }
      });
    }
    return out;
  },
  tanh: function(m) {
    // tanh nonlinearity
    var out = new Matrix(m.n, m.d);
    var n = m.w.length;
    for(var i=0;i<n;i++) {
      out.w[i] = Math.tanh(m.w[i]);
    }

    if(this.needs_backprop) {
      this.backprop.push(function backward() {
        for(var i=0;i<n;i++) {
          // grad for z = tanh(x) is (1 - z^2)
          var mwi = out.w[i];
          m.dw[i] += (1.0 - mwi * mwi) * out.dw[i];
        }
      });
    }
    return out;
  },
  sigmoid: function(m) {
    // sigmoid nonlinearity
    var out = new Matrix(m.n, m.d);
    var n = m.w.length;
    for(var i=0;i<n;i++) {
      out.w[i] = sig(m.w[i]);
    }

    if(this.needs_backprop) {
      this.backprop.push(function backward() {
        for(var i=0;i<n;i++) {
          // grad for z = sig(x) is z * (1 - z)
          var mwi = out.w[i];
          m.dw[i] += mwi * (1.0 - mwi) * out.dw[i];
        }
      });
    }
    return out;
  },
  relu: function(m) {
    var out = new Matrix(m.n, m.d);
    var n = m.w.length;
    for(var i=0;i<n;i++) {
      out.w[i] = Math.max(0, m.w[i]); // relu
    }
    if(this.needs_backprop) {
      this.backprop.push(function backward() {
        for(var i=0;i<n;i++) {
          m.dw[i] += m.w[i] > 0 ? out.dw[i] : 0.0;
        }
      });
    }
    return out;
  }
};
/**
 * Softmax over all entries of m, returned as a new probability Matrix.
 * Subtracts the max entry before exponentiating for numerical stability.
 * No backward closure is recorded: callers set gradients on m directly.
 */
function softmax(m) {
  var out = new Matrix(m.n, m.d); // probability volume
  // BUG FIX: was a hard-coded -999999 sentinel, which produced NaNs
  // whenever every weight was below it (exp underflow, then 0/0)
  var maxval = -Infinity;
  for(var i=0,n=m.w.length;i<n;i++) { if(m.w[i] > maxval) maxval = m.w[i]; }

  var s = 0.0;
  for(var i=0,n=m.w.length;i<n;i++) {
    out.w[i] = Math.exp(m.w[i] - maxval);
    s += out.w[i];
  }
  for(var i=0,n=m.w.length;i<n;i++) { out.w[i] /= s; }

  // no backward pass here needed
  // since we will use the computed probabilities outside
  // to set gradients directly on m
  return out;
}
/**
 * RMSProp gradient-descent solver with a per-parameter step cache and
 * hard gradient clipping.
 */
function Solver() {
  this.decay_rate = 0.999;
  this.smooth_eps = 1e-8;
  this.step_cache = {};
}
Solver.prototype = {
  /**
   * Apply one RMSProp update to every Matrix in `model`, then zero the
   * gradients.
   * @param model     hash of name -> Matrix
   * @param step_size learning rate
   * @param regc      L2 regularization strength
   * @param clipval   gradients are clamped to [-clipval, clipval]
   * @returns stats hash with `ratio_clipped`
   */
  step: function(model, step_size, regc, clipval) {
    var num_clipped = 0;
    var num_tot = 0;

    for (var key in model) {
      if (!model.hasOwnProperty(key)) { continue; }

      var m = model[key]; // mat ref
      if (!(key in this.step_cache)) {
        this.step_cache[key] = new Matrix(m.n, m.d);
      }
      var cache = this.step_cache[key];

      for (var i = 0, n = m.w.length; i < n; i++) {
        // rmsprop adaptive learning rate
        // (the cache accumulates the RAW, pre-clip gradient)
        var grad = m.dw[i];
        cache.w[i] = cache.w[i] * this.decay_rate + (1.0 - this.decay_rate) * grad * grad;

        // gradient clip
        if (grad > clipval) {
          grad = clipval;
          num_clipped++;
        }
        if (grad < -clipval) {
          grad = -clipval;
          num_clipped++;
        }
        num_tot++;

        // update (and regularize)
        m.w[i] += -step_size * grad / Math.sqrt(cache.w[i] + this.smooth_eps) - regc * m.w[i];
        m.dw[i] = 0; // reset gradients for next iteration
      }
    }

    var solver_stats = {};
    solver_stats['ratio_clipped'] = num_clipped * 1.0 / num_tot;
    return solver_stats;
  }
};
/**
 * Build LSTM parameters for a stack of hidden layers.
 * hidden_sizes should be a list; the returned value is an array of
 * per-layer gate parameters with decoder params (whd, bd) attached as
 * extra properties, reading from the LAST hidden layer.
 */
function LSTM(input_size, hidden_sizes, output_size) {
  var model = [];
  var hidden_size;

  for (var d = 0; d < hidden_sizes.length; d++) { // loop over depths
    var prev_size = d === 0 ? input_size : hidden_sizes[d - 1];
    hidden_size = hidden_sizes[d];

    model.push({
      // input gate
      wix: new RandomMatrix(hidden_size, prev_size, 0, 0.08),
      wih: new RandomMatrix(hidden_size, hidden_size, 0, 0.08),
      bi: new Matrix(hidden_size, 1),

      // forget gate
      wfx: new RandomMatrix(hidden_size, prev_size, 0, 0.08),
      wfh: new RandomMatrix(hidden_size, hidden_size, 0, 0.08),
      bf: new Matrix(hidden_size, 1),

      // output gate
      wox: new RandomMatrix(hidden_size, prev_size, 0, 0.08),
      woh: new RandomMatrix(hidden_size, hidden_size, 0, 0.08),
      bo: new Matrix(hidden_size, 1),

      // cell write params
      wcx: new RandomMatrix(hidden_size, prev_size, 0, 0.08),
      wch: new RandomMatrix(hidden_size, hidden_size, 0, 0.08),
      bc: new Matrix(hidden_size, 1)
    });
  }

  // decoder params (hidden_size here is the size of the last layer)
  model.whd = new RandomMatrix(output_size, hidden_size, 0, 0.08);
  model.bd = new Matrix(output_size, 1);
  return model;
}
/**
 * Forward-propagate one LSTM tick through every stacked layer.
 *
 * @param {Graph} graph graph to append nonlinearity ops to
 * @param model  parameters as produced by LSTM()
 * @param hidden_sizes list of hidden layer sizes (same as passed to LSTM)
 * @param x      1D column vector (Matrix) with the observation
 * @param prev   {h, c} from the previous tick; pass {} for the first tick
 * @returns {{h: Array, c: Array, o}} new hidden states, cell states, output
 *
 * NOTE(review): multiply/add/multiplyElement are called WITHOUT a
 * backPropagateArray, so only the graph.* ops record backward closures —
 * gradients will not flow through the weight matrices as written. Confirm
 * whether these calls should receive graph.backprop.
 */
function forwardLSTM(graph, model, hidden_sizes, x, prev) {
  // forward prop for a single tick of LSTM
  // G is graph to append ops to
  // model contains LSTM parameters
  // x is 1D column vector with observation
  // prev is a struct containing hidden and cell
  // from previous iteration

  // first tick: previous hidden/cell states default to zero vectors
  var hidden_prevs,
    cell_prevs;
  if(typeof prev.h === 'undefined') {
    hidden_prevs = [];
    cell_prevs = [];
    for(var d=0;d<hidden_sizes.length;d++) {
      hidden_prevs.push(new Matrix(hidden_sizes[d],1));
      cell_prevs.push(new Matrix(hidden_sizes[d],1));
    }
  } else {
    hidden_prevs = prev.h;
    cell_prevs = prev.c;
  }

  var hidden = [];
  var cell = [];
  for(var d=0;d<hidden_sizes.length;d++) {

    // layer 0 reads the observation; deeper layers read the layer below
    var input_vector = d === 0 ? x : hidden[d-1];
    var hidden_prev = hidden_prevs[d];
    var cell_prev = cell_prevs[d];

    // input gate
    var h0 = multiply(model[d].wix, input_vector);
    var h1 = multiply(model[d].wih, hidden_prev);
    var input_gate = graph.sigmoid(add(add(h0,h1),model[d].bi));

    // forget gate
    var h2 = multiply(model[d].wfx, input_vector);
    var h3 = multiply(model[d].wfh, hidden_prev);
    var forget_gate = graph.sigmoid(add(add(h2, h3),model[d].bf));

    // output gate
    var h4 = multiply(model[d].wox, input_vector);
    var h5 = multiply(model[d].woh, hidden_prev);
    var output_gate = graph.sigmoid(add(add(h4, h5),model[d].bo));

    // write operation on cells
    var h6 = multiply(model[d].wcx, input_vector);
    var h7 = multiply(model[d].wch, hidden_prev);
    var cell_write = graph.tanh(add(add(h6, h7),model[d].bc));

    // compute new cell activation
    var retain_cell = multiplyElement(forget_gate, cell_prev); // what do we keep from cell
    var write_cell = multiplyElement(input_gate, cell_write); // what do we write to cell
    var cell_d = add(retain_cell, write_cell); // new cell contents

    // compute hidden state as gated, saturated cell activations
    var hidden_d = multiplyElement(output_gate, graph.tanh(cell_d));

    hidden.push(hidden_d);
    cell.push(cell_d);
  }

  // one decoder to outputs at end
  var output = add(multiply(model.whd, hidden[hidden.length - 1]), model.bd);

  // return cell memory, hidden representation and output
  return {
    h: hidden,
    c: cell,
    o: output
  };
}
/**
 * Build plain-RNN parameters for a stack of hidden layers.
 * hidden_sizes should be a list; the returned value is an array of
 * per-layer parameters with decoder params (whd, bd) attached as extra
 * properties, reading from the LAST hidden layer.
 */
function RNN(input_size, hidden_sizes, output_size) {
  var model = [];
  var hidden_size;

  for (var d = 0; d < hidden_sizes.length; d++) { // loop over depths
    var prev_size = d === 0 ? input_size : hidden_sizes[d - 1];
    hidden_size = hidden_sizes[d];

    model.push({
      wxh: new RandomMatrix(hidden_size, prev_size, 0, 0.08),
      whh: new RandomMatrix(hidden_size, hidden_size, 0, 0.08),
      bhh: new Matrix(hidden_size, 1)
    });
  }

  // decoder params (hidden_size here is the size of the last layer)
  model.whd = new RandomMatrix(output_size, hidden_size, 0, 0.08);
  model.bd = new Matrix(output_size, 1);
  return model;
}
/**
 * Forward-propagate one RNN tick through every stacked layer.
 *
 * @param {Graph} graph graph to append nonlinearity ops to
 * @param model  parameters as produced by RNN()
 * @param hidden_sizes list of hidden layer sizes (same as passed to RNN)
 * @param x      1D column vector (Matrix) with the observation
 * @param prev   {h} from the previous tick; pass {} for the first tick
 * @returns {{h: Array, o}} new hidden states and decoder output
 *
 * NOTE(review): multiply/add are called WITHOUT a backPropagateArray, so
 * only graph.relu records a backward closure — gradients will not flow
 * through the weight matrices as written. Confirm intended.
 */
function forwardRNN(graph, model, hidden_sizes, x, prev) {
  // forward prop for a single tick of RNN
  // G is graph to append ops to
  // model contains RNN parameters
  // x is 1D column vector with observation
  // prev is a struct containing hidden activations from last step

  // first tick: previous hidden states default to zero vectors
  var hidden_prevs;
  if(typeof prev.h === 'undefined') {
    hidden_prevs = [];
    for(var d=0;d<hidden_sizes.length;d++) {
      hidden_prevs.push(new Matrix(hidden_sizes[d],1));
    }
  } else {
    hidden_prevs = prev.h;
  }

  var hidden = [];
  for(var d=0;d<hidden_sizes.length;d++) {

    // layer 0 reads the observation; deeper layers read the layer below
    var input_vector = d === 0 ? x : hidden[d-1];
    var hidden_prev = hidden_prevs[d];

    var h0 = multiply(model[d].wxh, input_vector);
    var h1 = multiply(model[d].whh, hidden_prev);
    var hidden_d = graph.relu(add(add(h0, h1), model[d].bhh));

    hidden.push(hidden_d);
  }

  // one decoder to outputs at end
  var output = add(multiply(model.whd, hidden[hidden.length - 1]), model.bd);

  // return hidden representation and output
  return {
    h: hidden,
    o: output
  };
}
/** Logistic sigmoid helper: maps any real x into (0, 1). */
function sig(x) {
  var expNegX = Math.exp(-x);
  return 1.0 / (1 + expNegX);
}
/** Index of the largest value in array w (first occurrence wins). */
function maxi(w) {
  var bestIndex = 0;
  var bestValue = w[0];

  for (var idx = 1; idx < w.length; idx++) {
    if (w[idx] > bestValue) {
      bestValue = w[idx];
      bestIndex = idx;
    }
  }

  return bestIndex;
}
/**
 * Sample an index from w, treating w as probabilities that sum to one.
 *
 * Fixes two defects: the original called `randf`, which is not defined
 * in this module (ReferenceError on every call), and its unbounded
 * `while(true)` looped forever whenever floating-point rounding left the
 * running sum below r. We now bound the loop and fall back to the last
 * index.
 */
function samplei(w) {
  // randf(0, 1) is exactly Math.random(); randf was never in scope here
  var r = Math.random();
  var x = 0.0;
  for (var i = 0; i < w.length; i++) {
    x += w[i];
    if (x > r) { return i; }
  }
  // rounding (or an underweight distribution) can leave x <= r
  return w.length - 1;
}
367+
368+
// various utils
369+
module.exports = {
370+
maxi: maxi,
371+
samplei: samplei,
372+
randi: randi,
373+
softmax: softmax,
374+
375+
// classes
376+
Matrix: Matrix,
377+
RandomMatrix: RandomMatrix,
378+
379+
forwardLSTM: forwardLSTM,
380+
LSTM: LSTM,
381+
forwardRNN: forwardRNN,
382+
RNN: RNN,
383+
384+
// optimization
385+
Solver: Solver,
386+
Graph: Graph
387+
};

‎lib/recurrent/lstm.js

Whitespace-only changes.

‎lib/recurrent/matrix/add.js

+17
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
module.export = function add(m1, m2, backPropagateArray) {
2+
if (m1.w.length !== m2.w.length) throw new Error('matrix addition dimensions misaligned');
3+
4+
var out = new Matrix(m1.n, m1.d);
5+
for(var i=0,n=m1.w.length;i<n;i++) {
6+
out.w[i] = m1.w[i] + m2.w[i];
7+
}
8+
if(typeof backPropagateArray !== 'undefined') {
9+
backPropagateArray.push(function backward() {
10+
for(var i=0,n=m1.w.length;i<n;i++) {
11+
m1.dw[i] += out.dw[i];
12+
m2.dw[i] += out.dw[i];
13+
}
14+
});
15+
}
16+
return out;
17+
};

‎lib/recurrent/matrix/index.js

+78
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
var zeros = require('./zeros');
2+
// Mat holds a matrix
/**
 * n-by-d matrix stored row-major in `w`, with a parallel gradient
 * buffer `dw` of the same length.
 */
function Matrix(n, d) {
  // n is number of rows d is number of columns
  this.n = n;
  this.d = d;
  this.w = zeros(n * d);
  this.dw = zeros(n * d);
}

Matrix.prototype = {
  get: function(row, col) {
    // slow but careful accessor function
    // we want row-major order
    var ix = (this.d * row) + col;
    // BUG FIX: was `ix < 0 && ix >= this.w.length`, which can never be
    // true, so out-of-range reads silently returned undefined
    if (ix < 0 || ix >= this.w.length) throw new Error('get accessor is skewed');
    return this.w[ix];
  },
  set: function(row, col, v) {
    // slow but careful accessor function
    var ix = (this.d * row) + col;
    // BUG FIX: same always-false `&&` guard as get(); use || so bad
    // indices throw instead of silently writing nothing
    if (ix < 0 || ix >= this.w.length) throw new Error('set accessor is skewed');
    this.w[ix] = v;
  },
  // serialize dimensions and weights (gradients are not persisted)
  toJSON: function() {
    return {
      n: this.n,
      d: this.d,
      w: this.w
    };
  },
  fromJSON: function(json) {
    this.n = json.n;
    this.d = json.d;
    this.w = zeros(this.n * this.d);
    this.dw = zeros(this.n * this.d);
    for(var i=0,n=this.n * this.d;i<n;i++) {
      this.w[i] = json.w[i]; // copy over weights
    }
  },

  // fill matrix with random gaussian numbers
  fillRandN: function(mu, std) {
    for(var i=0,n=this.w.length;i<n;i++) {
      this.w[i] = randn(mu, std);
    }
  },

  // fill matrix with uniform random numbers in [lo, hi)
  fillRand: function(lo, hi) {
    for(var i=0,n=this.w.length;i<n;i++) {
      this.w[i] = randf(lo, hi);
    }
  }
};

// BUG FIX: fillRand called `randf`, which was never defined in this
// module (ReferenceError); uniform random in [lo, hi)
function randf(lo, hi) { return Math.random() * (hi - lo) + lo; }

// gaussian with mean mu and standard deviation std
function randn(mu, std){ return mu+gaussRandom()*std; }

// Random numbers utils
// Marsaglia polar method: generates pairs of gaussians; the second of
// each pair is cached in v_val and returned on the next call
var return_v = false;
var v_val = 0.0;
function gaussRandom() {
  if(return_v) {
    return_v = false;
    return v_val;
  }
  var u = 2*Math.random()-1;
  var v = 2*Math.random()-1;
  var r = u*u + v*v;
  if(r == 0 || r > 1) return gaussRandom();
  var c = Math.sqrt(-2*Math.log(r)/r);
  v_val = v*c; // cache this
  return_v = true;
  return u*c;
}

// BUG FIX: was `module.export` (typo) — the module exported an empty object
module.exports = Matrix;
+18
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
module.exports = function elementMultiply(m1, m2, backPropagateArray) {
2+
if (m1.w.length !== m2.w.length) throw new Error('matrix element multiplication dimensions misaligned');
3+
4+
var out = new Matrix(m1.n, m1.d);
5+
for(var i=0,n=m1.w.length;i<n;i++) {
6+
out.w[i] = m1.w[i] * m2.w[i];
7+
8+
}
9+
if(typeof backPropagateArray !== 'undefined') {
10+
backPropagateArray.push(function backward() {
11+
for(var i=0,n=m1.w.length;i<n;i++) {
12+
m1.dw[i] += m2.w[i] * out.dw[i];
13+
m2.dw[i] += m1.w[i] * out.dw[i];
14+
}
15+
});
16+
}
17+
return out;
18+
};

‎lib/recurrent/matrix/multiply.js

+38
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
/**
2+
* multiply matrices m1 * m2
3+
* @param {Matrix} m1
4+
* @param {Matrix} m2
5+
* @param backPropagateArray
6+
* @returns {Matrix}
7+
*/
8+
module.exports = function multiply(m1, m2, backPropagateArray) {
9+
if (m1.d !== m2.n) throw new Error('matrix multiplication dimensions misaligned');
10+
11+
var n = m1.n;
12+
var d = m2.d;
13+
var out = new Matrix(n, d);
14+
for(var i=0;i<m1.n;i++) { // loop over rows of m1
15+
for(var j=0;j<m2.d;j++) { // loop over cols of m2
16+
var dot = 0.0;
17+
for(var k=0;k<m1.d;k++) { // dot product loop
18+
dot += m1.w[m1.d*i+k] * m2.w[m2.d*k+j];
19+
}
20+
out.w[d*i+j] = dot;
21+
}
22+
}
23+
24+
if(typeof backPropagateArray !== 'undefined') {
25+
backPropagateArray.push(function backward() {
26+
for(var i=0;i<m1.n;i++) { // loop over rows of m1
27+
for(var j=0;j<m2.d;j++) { // loop over cols of m2
28+
for(var k=0;k<m1.d;k++) { // dot product loop
29+
var b = out.dw[d*i+j];
30+
m1.dw[m1.d*i+k] += m2.w[m2.d*k+j] * b;
31+
m2.dw[m2.d*k+j] += m1.w[m1.d*i+k] * b;
32+
}
33+
}
34+
}
35+
});
36+
}
37+
return out;
38+
};

‎lib/recurrent/matrix/random-n.js

+12
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
var Matrix = require('./index');
// BUG FIX: zeros was referenced but never required (ReferenceError)
var zeros = require('./zeros');

/**
 * Matrix filled with gaussian random numbers (mean mu, stddev std).
 * Shares Matrix.prototype, so all accessors/serialization work.
 */
function RandomMatrixN(n, d, mu, std) {
  this.n = n;
  this.d = d;
  this.w = zeros(n * d);
  this.dw = zeros(n * d);
  this.fillRandN(mu, std);
}
RandomMatrixN.prototype = Matrix.prototype;

module.exports = RandomMatrixN;

‎lib/recurrent/matrix/random.js

+17
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
var Matrix = require('./index');
// BUG FIX: zeros was referenced but never required (ReferenceError)
var zeros = require('./zeros');

/** return Mat but filled with random numbers
 * (uniform in [-std, std) — see the fillRand call below)
 * @param n rows
 * @param d columns
 * @param mu unused by the uniform fill; kept for interface compatibility
 * @param std half-width of the uniform range
 * @constructor
 */
function RandomMatrix(n, d, mu, std) {
  // BUG FIX: n and d were never stored, which broke every
  // Matrix.prototype method (get/set/toJSON/fromJSON read this.n/this.d)
  this.n = n;
  this.d = d;
  this.w = zeros(n * d);
  this.dw = zeros(n * d);
  this.fillRand(-std, std); // kind of :P
}
RandomMatrix.prototype = Matrix.prototype;

module.exports = RandomMatrix;

‎lib/recurrent/matrix/zeros.js

+12
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
// helper function returns array of zeros of length n
2+
// and uses typed arrays if available
3+
module.export = function zeros(n) {
4+
if(typeof ArrayBuffer === 'undefined') {
5+
// lacking browser support
6+
var arr = new Array(n);
7+
for(var i=0;i<n;i++) { arr[i] = 0; }
8+
return arr;
9+
} else {
10+
return new Float64Array(n);
11+
}
12+
};

‎lib/recurrent/maxi.js

Whitespace-only changes.

‎lib/recurrent/random.js

+7
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
function randf(a, b) { return Math.random()*(b-a)+a; }
2+
function randi(a, b) { return Math.floor(Math.random()*(b-a)+a); }
3+
4+
module.exports = {
5+
f: randf,
6+
i: randi
7+
};

‎lib/recurrent/rnn.js

Whitespace-only changes.

‎lib/recurrent/sample-i.js

Whitespace-only changes.

‎lib/recurrent/softmax.js

Whitespace-only changes.

‎lib/train-stream.js

+132
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,132 @@
import { Writable } from 'stream';
import _ from 'underscore';

import * as lookup from './lookup';

/**
 * Writable object-mode stream that trains a neural network from a flood
 * of {input, output} data. The first pass over the stream only collects
 * keys to size the network; subsequent passes train it, one datum per
 * _write, until the iteration or error threshold is met.
 */
class TrainStream extends Writable {
  constructor(opts = {}) {
    super({ objectMode: true });

    // require the neuralNetwork
    if (!opts.neuralNetwork) {
      throw new Error('no neural network specified');
    }

    this.neuralNetwork = opts.neuralNetwork;
    // false until the first full pass has sized/initialized the network
    this.dataFormatDetermined = false;

    this.inputKeys = [];
    this.outputKeys = []; // keeps track of keys seen
    this.i = 0; // keep track of the for loop i variable that we got rid of
    this.iterations = opts.iterations || 20000;
    this.errorThresh = opts.errorThresh || 0.005;
    this.log = opts.log ? (typeof opts.log === 'function' ? opts.log : console.log) : false;
    this.logPeriod = opts.logPeriod || 10;
    this.callback = opts.callback;
    this.callbackPeriod = opts.callbackPeriod || 10;
    this.floodCallback = opts.floodCallback;
    this.doneTrainingCallback = opts.doneTrainingCallback;
    // NOTE(review): this.hiddenSizes is read in finishStreamIteration but
    // never assigned — presumably opts.hiddenSizes should be stored here;
    // confirm.

    this.size = 0;   // datum count seen during the sizing pass
    this.count = 0;  // datum count seen during the current training pass

    this.sum = 0;    // accumulated training error for the current pass

    this.on('finish', this.finishStreamIteration);

    return this;
  }

  /*
    _write expects data to be in the form of a datum.
    ie. {input: {a: 1 b: 0}, output: {z: 0}}
   */
  _write(chunk, enc, next) {
    if (!chunk) { // check for the end of one iteration of the stream
      this.emit('finish');
      return next();
    }

    // sizing pass: just record which input/output keys exist
    if (!this.dataFormatDetermined) {
      this.size++;
      this.inputKeys = _.union(this.inputKeys, _.keys(chunk.input));
      this.outputKeys = _.union(this.outputKeys, _.keys(chunk.output));
      this.firstDatum = this.firstDatum || chunk;
      return next();
    }

    this.count++;

    let data = this.neuralNetwork.formatData(chunk);
    this.trainDatum(data[0]);

    // tell the Readable Stream that we are ready for more data
    next();
  }

  // train on a single formatted datum and accumulate its error
  trainDatum(datum) {
    let err = this.neuralNetwork.trainPattern(datum.input, datum.output);
    this.sum += err;
  }

  // called on 'finish': either initialize the network (first pass) or
  // close out one training pass and decide whether another is needed
  finishStreamIteration () {
    if (this.dataFormatDetermined && this.size !== this.count) {
      console.log("This iteration's data length was different from the first.");
    }

    if (!this.dataFormatDetermined) {
      // create the lookup
      // NOTE(review): `_` and `lookup` must be in this module's scope —
      // confirm the imports at the top of the file.
      this.neuralNetwork.inputLookup = lookup.lookupFromArray(this.inputKeys);
      if(!_.isArray(this.firstDatum.output)){
        this.neuralNetwork.outputLookup = lookup.lookupFromArray(this.outputKeys);
      }

      let data = this.neuralNetwork.formatData(this.firstDatum)
        , inputSize = data[0].input.length
        , outputSize = data[0].output.length
        , hiddenSizes = this.hiddenSizes
        ;

      // default hidden layer: half the input size, at least 3 units
      if (!hiddenSizes) {
        hiddenSizes = [Math.max(3, Math.floor(inputSize / 2))];
      }
      let sizes = _([inputSize, hiddenSizes, outputSize]).flatten();
      this.dataFormatDetermined = true;
      this.neuralNetwork.initialize(sizes);

      // ask the producer to replay the data for the first training pass
      if (typeof this.floodCallback === 'function') {
        this.floodCallback();
      }
      return;
    }

    // mean error over this pass
    let error = this.sum / this.size;

    if (this.log && (this.i % this.logPeriod == 0)) {
      this.log("iterations:", this.i, "training error:", error);
    }
    if (this.callback && (this.i % this.callbackPeriod == 0)) {
      this.callback({
        error: error,
        iterations: this.i
      });
    }

    this.sum = 0;
    this.count = 0;
    // update the iterations
    this.i++;

    // do a check here to see if we need the stream again
    if (this.i < this.iterations && error > this.errorThresh) {
      if (typeof this.floodCallback === 'function') {
        return this.floodCallback();
      }
    } else {
      // done training
      if (typeof this.doneTrainingCallback === 'function') {
        return this.doneTrainingCallback({
          error: error,
          iterations: this.i
        });
      }
    }
  }
}

‎lib/utilities/mse.js

+8
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
/**
 * Mean squared error of an array of error terms.
 * Returns NaN for an empty array (0 / 0), matching the naive formula.
 */
export default function mse(errors) {
  let total = 0;
  for (const e of errors) {
    total += e * e;
  }
  return total / errors.length;
}

‎lib/utilities/random-weight.js

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
/**
 * Random initial connection weight, uniform in [-0.2, 0.2).
 */
export default function randomWeight() {
  const span = 0.4;
  return Math.random() * span - span / 2;
}

‎lib/utilities/randos.js

+9
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
// BUG FIX: was './utilities/random-weight' — this file already lives in
// lib/utilities, so the old path resolved to lib/utilities/utilities/
// and failed to load
import randomWeight from './random-weight';

/**
 * Array of `size` random initial weights (each uniform in [-0.2, 0.2)).
 * @param {number} size
 * @returns {number[]}
 */
export default function randos(size) {
  let array = new Array(size);
  for (let i = 0; i < size; i++) {
    array[i] = randomWeight();
  }
  return array;
}

‎lib/utilities/zeros.js

+7
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
/**
 * Array of `size` numeric zeros.
 * @param {number} size
 * @returns {number[]}
 */
export default function zeros(size) {
  return new Array(size).fill(0);
}

0 commit comments

Comments
 (0)
Please sign in to comment.