changeset 500:3c60c2db0319
Added new daa test
| author | Joseph Turian <turian@gmail.com> |
|---|---|
| date | Tue, 28 Oct 2008 13:36:27 -0400 |
| parents | a419edf4e06c |
| children | 4fb6f7320518 |
| files | algorithms/tests/test_daa.py |
| diffstat | 1 files changed, 26 insertions(+), 0 deletions(-) |
--- a/algorithms/tests/test_daa.py	Tue Oct 28 12:57:49 2008 -0400
+++ b/algorithms/tests/test_daa.py	Tue Oct 28 13:36:27 2008 -0400
@@ -5,6 +5,7 @@
 import numpy
 import time
 
+import pylearn.algorithms.logistic_regression
 
 def test_train_daa(mode = theano.Mode('c|py', 'fast_run')):
 
@@ -30,6 +31,30 @@
     print model.classify([[0, 1, 0, 1]])
 
 
+def test_train_daa2(mode = theano.Mode('c|py', 'fast_run')):
+
+    ndaa = 3
+    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'output')],
+                         regularize = False)
+
+    model = daa.make([4, 20, 20, 20, 1],
+                     lr = 0.01,
+                     mode = mode,
+                     seed = 10)
+
+    model.layers[0].noise_level = 0.3
+    model.layers[1].noise_level = 0.3
+    model.layers[2].noise_level = 0.3
+
+    # Update the first hidden layer
+    model.local_update[0]([[0, 1, 0, 1]])
+    model.local_update[1]([[0, 1, 0, 1]])
+    model.local_update[2]([[0, 1, 0, 1]])
+
+    model.update([[0, 1, 0, 1]], [0])
+    print model.classify([[0, 1, 0, 1]])
+
+
 
 
 
@@ -46,3 +71,4 @@
 
 #    test_train_daa(theano.compile.Mode('c&py', 'merge'))
     test_train_daa(theano.compile.Mode('c|py', 'merge'))
+    test_train_daa2(theano.compile.Mode('c|py', 'merge'))
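The sketch below is a hedged usage note, not part of the changeset: it shows one way to run the new test directly instead of through the script's own bottom-of-file calls. The import path `pylearn.algorithms.tests.test_daa` is an assumption inferred from the file location `algorithms/tests/test_daa.py`; the compile mode mirrors the one the diff passes at the bottom of the file.

```python
# Minimal invocation sketch (not part of the changeset).
# Assumption: the test module is importable as pylearn.algorithms.tests.test_daa,
# matching the path algorithms/tests/test_daa.py inside the pylearn package.
import theano
import pylearn.algorithms.tests.test_daa as test_daa

# Same compile mode used at the bottom of the diff.
mode = theano.compile.Mode('c|py', 'merge')

# test_train_daa2 builds a 3-layer stacked denoising autoencoder with a
# logistic-regression output module, runs greedy per-layer local updates,
# then one supervised update, and prints the classification of its toy input.
test_daa.test_train_daa2(mode)
```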