pylearn: diff test_mlp.py @ 186:562f308873f0
added ManualNNet
| author | James Bergstra <bergstrj@iro.umontreal.ca> |
|---|---|
| date | Tue, 13 May 2008 20:10:03 -0400 |
| parents | 25d0a0c713da |
| children | ebbb0e749565 |
--- a/test_mlp.py	Tue May 13 19:37:29 2008 -0400
+++ b/test_mlp.py	Tue May 13 20:10:03 2008 -0400
@@ -1,6 +1,7 @@
 from mlp import *
 import dataset
+import nnet_ops
 
 from functools import partial
@@ -64,5 +65,40 @@
     for fieldname in output_ds.fieldNames():
         print fieldname+"=",output_ds[fieldname]
 
-test0()
+def test1():
+    nnet = ManualNNet(2, 10,3,.1,1000)
+    training_set = dataset.ArrayDataSet(numpy.array([[0, 0, 0],
+                                                     [0, 1, 1],
+                                                     [1, 0, 1],
+                                                     [1, 1, 1]]),
+                                        {'input':slice(2),'target':2})
+    fprop=nnet(training_set)
+
+    output_ds = fprop(training_set)
+
+    for fieldname in output_ds.fieldNames():
+        print fieldname+"=",output_ds[fieldname]
+
+def test2():
+    training_set = dataset.ArrayDataSet(numpy.array([[0, 0, 0],
+                                                     [0, 1, 1],
+                                                     [1, 0, 1],
+                                                     [1, 1, 1]]),
+                                        {'input':slice(2),'target':2})
+    nin, nhid=2, 10
+    def sigm_layer(input):
+        W1 = t.matrix('W1')
+        b1 = t.vector('b1')
+        return (nnet_ops.sigmoid(b1 + t.dot(input, W1)),
+                [W1, b1],
+                [(numpy.random.rand(nin, nhid) -0.5) * 0.001, numpy.zeros(nhid)])
+    nnet = ManualNNet(nin, nhid, 3, .1, 1000, hidden_layer=sigm_layer)
+    fprop=nnet(training_set)
+
+    output_ds = fprop(training_set)
+
+    for fieldname in output_ds.fieldNames():
+        print fieldname+"=",output_ds[fieldname]
+
+test1()
+test2()
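test2 exercises the hidden_layer hook of ManualNNet: judging from sigm_layer, the callable takes the symbolic input and returns a triple of (output expression, parameter variables, initial parameter values). The following is a minimal numpy-only sketch of what that sigmoid layer computes, just to make the convention concrete; the sigmoid helper and variable names are illustrative and not part of the pylearn API, and only the (nin, nhid) sizes, the initialization, and the input rows are taken from the diff.

```python
import numpy

def sigmoid(x):
    # plain numpy stand-in for nnet_ops.sigmoid
    return 1.0 / (1.0 + numpy.exp(-x))

nin, nhid = 2, 10                                    # sizes used in test2
W1 = (numpy.random.rand(nin, nhid) - 0.5) * 0.001    # same init as sigm_layer's third element
b1 = numpy.zeros(nhid)

# the four input rows of the XOR-style training set from the diff ('target' column dropped)
inputs = numpy.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)

# mirrors the symbolic expression nnet_ops.sigmoid(b1 + t.dot(input, W1))
hidden = sigmoid(b1 + numpy.dot(inputs, W1))
print(hidden.shape)                                  # (4, 10): one hidden vector per example
```

The actual test builds the same expression symbolically with t.matrix, t.vector, t.dot and nnet_ops.sigmoid, presumably so that ManualNNet can compute gradients and update W1 and b1 during training.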