pylearn: diff algorithms/tests/test_daa.py @ 493:32509c479e2d
Added test_daa.py
| | |
| --- | --- |
| author | Joseph Turian <turian@gmail.com> |
| date | Tue, 28 Oct 2008 11:40:31 -0400 |
| parents | |
| children | 3c60c2db0319 |
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/algorithms/tests/test_daa.py	Tue Oct 28 11:40:31 2008 -0400
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+
+from pylearn import algorithms as models
+import theano
+import numpy
+import time
+
+
+def test_train_daa(mode = theano.Mode('c|py', 'fast_run')):
+
+    ndaa = 3
+    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(models.BinRegressor, 'output')],
+                         regularize = False)
+
+    model = daa.make([4, 20, 20, 20, 1],
+                     lr = 0.01,
+                     mode = mode,
+                     seed = 10)
+
+    model.layers[0].noise_level = 0.3
+    model.layers[1].noise_level = 0.3
+    model.layers[2].noise_level = 0.3
+
+    # Locally update each hidden (denoising) layer on a toy input
+    model.local_update[0]([[0, 1, 0, 1]])
+    model.local_update[1]([[0, 1, 0, 1]])
+    model.local_update[2]([[0, 1, 0, 1]])
+
+    model.update([[0, 1, 0, 1]], [[0]])
+    print model.classify([[0, 1, 0, 1]])
+
+
+
+
+
+if __name__ == '__main__':
+#    print 'optimized:'
+#    t1 = test_train_daa(theano.Mode('py', 'fast_compile'))
+#    t1 = test_train_daa(theano.Mode('c|py', 'fast_run'))
+#    print 'time:',t1
+#    print
+
+#    print 'not optimized:'
+#    t2 = test_train_daa(theano.Mode('c|py', 'fast_compile'))
+##    print 'time:',t2
+
+#    test_train_daa(theano.compile.Mode('c&py', 'merge'))
+    test_train_daa(theano.compile.Mode('c|py', 'merge'))
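For reference, a minimal usage sketch of the test added above. It assumes the 2008-era pylearn checkout and Theano API shown in the diff: the import path simply mirrors the file's location in the repository, and the mode strings are the ones already used in the file; none of this is verified against a current release.

```python
# Usage sketch (assumptions: run from the pylearn source root, with the
# 2008-era Theano that accepts theano.Mode(linker, optimizer)).
import theano
from algorithms.tests.test_daa import test_train_daa

# Exercise the stacked denoising-autoencoder smoke test with the default
# optimizing mode, then with the pure-Python linker for easier debugging.
test_train_daa(mode=theano.Mode('c|py', 'fast_run'))
test_train_daa(mode=theano.Mode('py', 'fast_compile'))
```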