algorithms/tests/test_daa.py @ 672:27b1344a57b1
Added preprocessing back in
author    Joseph Turian <turian@gmail.com>
date      Thu, 20 Nov 2008 06:38:06 -0500
parents   de974b4fc4ea
children
#!/usr/bin/python

from pylearn import algorithms as models
import theano
import numpy
import time

import pylearn.algorithms.logistic_regression


def test_train_daa(mode=theano.Mode('c|py', 'fast_run')):
    # Stack three denoising autoencoder layers with a binary regressor on top.
    ndaa = 3
    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa
                         + [(models.BinRegressor, 'output')],
                         regularize=False)

    model = daa.make([4, 20, 20, 20, 1],
                     lr=0.01,
                     mode=mode,
                     seed=10)

    model.layers[0].noise_level = 0.3
    model.layers[1].noise_level = 0.3
    model.layers[2].noise_level = 0.3

    # Greedy layer-wise pretraining: update each hidden layer in turn.
    for l in range(3):
        for i in range(10):
            model.local_update[l]([[0, 1, 0, 1]])
            model.local_update[l]([[1, 0, 1, 0]])

    # Supervised fine-tuning of the whole stack.
    for i in range(10):
        model.update([[0, 1, 0, 1]], [[1]])
        model.update([[1, 0, 1, 0]], [[0]])
    print model.classify([[0, 1, 0, 1]])
    print model.classify([[1, 0, 1, 0]])


def test_train_daa2(mode=theano.Mode('c|py', 'fast_run')):
    # Same stack, but with an N-class logistic regression output layer.
    ndaa = 3
    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa
                         + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'pred')],
                         regularize=False)

    model = daa.make([4] + [20] * ndaa + [10],
                     lr=0.01,
                     mode=mode,
                     seed=10)

    for l in range(ndaa):
        model.layers[l].noise_level = 0.3

    instances = [([[0, 1, 0, 1]], [1]), ([[1, 0, 1, 0]], [0])]

    # Greedy layer-wise pretraining.
    for l in range(ndaa):
        for i in range(10):
            for (input, output) in instances:
                model.local_update[l](input)

    # Supervised fine-tuning; print the validation loss before and after each update.
    for i in range(10):
        for (input, output) in instances:
            # model.update(input, output)
            print "OLD:",
            print model.validate(input, output)
            oldloss = model.update(input, output)
            print oldloss
            print "NEW:"
            print model.validate(input, output)
            print

    print model.apply([[0, 1, 0, 1]])
    print model.apply([[1, 0, 1, 0]])


if __name__ == '__main__':
#    print 'optimized:'
#    t1 = test_train_daa(theano.Mode('py', 'fast_compile'))
#    t1 = test_train_daa(theano.Mode('c|py', 'fast_run'))
#    print 'time:',t1
#    print

#    print 'not optimized:'
#    t2 = test_train_daa(theano.Mode('c|py', 'fast_compile'))
##    print 'time:',t2

#    test_train_daa(theano.compile.Mode('c&py', 'merge'))
#    test_train_daa(theano.compile.Mode('c|py', 'merge'))
    test_train_daa(theano.compile.Mode('py', 'merge'))
    test_train_daa2(theano.compile.Mode('c|py', 'merge'))