pylearn: changeset 501:4fb6f7320518
N-class logistic regression top-layer works
author    Joseph Turian <turian@gmail.com>
date      Tue, 28 Oct 2008 13:54:01 -0400
parents   3c60c2db0319
children  17945defd813
files     algorithms/logistic_regression.py, algorithms/tests/test_daa.py
diffstat  2 files changed, 26 insertions(+), 15 deletions(-)
--- a/algorithms/logistic_regression.py	Tue Oct 28 13:36:27 2008 -0400
+++ b/algorithms/logistic_regression.py	Tue Oct 28 13:54:01 2008 -0400
@@ -8,8 +8,11 @@
 import numpy as N
 
 class LogRegInstanceType(module.FancyModuleInstance):
-    def initialize(self, n_in, n_out=1, rng=N.random):
+    def initialize(self, n_in, n_out=1, rng=N.random, seed=None):
         #self.component is the LogisticRegressionTemplate instance that built this guy.
+        """
+        @todo: Remove seed. Used only to keep Stacker happy.
+        """
         self.w = N.zeros((n_in, n_out))
         self.b = N.zeros(n_out)
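For context on the change above: the (n_in, n_out) weight matrix is what makes this a top layer for N classes rather than a single logistic output. Each of the n_out columns scores one class, and a softmax turns the scores into class probabilities. Below is a minimal numpy sketch of that forward pass; it is generic multinomial logistic regression, not a transcription of pylearn's Module_Nclass, and the function name is hypothetical.

import numpy as N

def nclass_logreg_forward(x, w, b):
    # Generic softmax forward pass (a sketch, not pylearn's code).
    # Assumed shapes: x (batch, n_in), w (n_in, n_out), b (n_out,).
    scores = N.dot(x, w) + b                       # (batch, n_out) class scores
    scores = scores - scores.max(axis=1)[:, None]  # stabilize exp
    e = N.exp(scores)
    return e / e.sum(axis=1)[:, None]              # each row sums to 1

# With the zero initialization from the diff, every class starts
# equally likely: for n_out=10, each probability is 0.1.
w = N.zeros((4, 10))
b = N.zeros(10)
p = nclass_logreg_forward(N.array([[0., 1., 0., 1.]]), w, b)

Zero initialization is harmless here because logistic regression is convex in (w, b); unlike hidden layers, the output layer needs no random symmetry breaking.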
--- a/algorithms/tests/test_daa.py	Tue Oct 28 13:36:27 2008 -0400
+++ b/algorithms/tests/test_daa.py	Tue Oct 28 13:54:01 2008 -0400
@@ -23,21 +23,25 @@
     model.layers[2].noise_level = 0.3
 
-    # Update the first hidden layer
-    model.local_update[0]([[0, 1, 0, 1]])
-    model.local_update[1]([[0, 1, 0, 1]])
-    model.local_update[2]([[0, 1, 0, 1]])
+    for l in range(3):
+        for i in range(10):
+            model.local_update[l]([[0, 1, 0, 1]])
+            model.local_update[l]([[1, 0, 1, 0]])
 
-    model.update([[0, 1, 0, 1]], [[0]])
+    for i in range(1):
+        model.update([[0, 1, 0, 1]], [[1]])
+        model.update([[1, 0, 1, 0]], [[0]])
 
     print model.classify([[0, 1, 0, 1]])
+    print model.classify([[1, 0, 1, 0]])
 
 def test_train_daa2(mode = theano.Mode('c|py', 'fast_run')):
     ndaa = 3
-    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'output')],
+    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'pred')],
                          regularize = False)
-    model = daa.make([4, 20, 20, 20, 1],
+    model = daa.make([4, 20, 20, 20, 10],
                      lr = 0.01,
                      mode = mode,
                      seed = 10)
@@ -46,14 +50,16 @@
     model.layers[1].noise_level = 0.3
     model.layers[2].noise_level = 0.3
 
-    # Update the first hidden layer
-    model.local_update[0]([[0, 1, 0, 1]])
-    model.local_update[1]([[0, 1, 0, 1]])
-    model.local_update[2]([[0, 1, 0, 1]])
+    for l in range(3):
+        for i in range(10):
+            model.local_update[l]([[0, 1, 0, 1]])
+            model.local_update[l]([[1, 0, 1, 0]])
 
-    model.update([[0, 1, 0, 1]], [0])
-    print model.classify([[0, 1, 0, 1]])
-
+    for i in range(1):
+        model.update([[0, 1, 0, 1]], [1])
+        model.update([[1, 0, 1, 0]], [0])
+    print model.apply([[0, 1, 0, 1]])
+    print model.apply([[1, 0, 1, 0]])
@@ -70,5 +76,7 @@
 ## print 'time:',t2
 
 # test_train_daa(theano.compile.Mode('c&py', 'merge'))
-    test_train_daa(theano.compile.Mode('c|py', 'merge'))
+#    test_train_daa(theano.compile.Mode('c|py', 'merge'))
+    test_train_daa(theano.compile.Mode('py', 'merge'))
+    test_train_daa2(theano.compile.Mode('c|py', 'merge'))
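The reworked test makes the N-class shape bookkeeping visible: the stack is sized [4, 20, 20, 20, 10] (a 4-d input, three 20-unit denoising layers, a 10-way softmax output), and supervised targets become integer class indices ([1], [0]) instead of the old regression-style [[0]]. Each layer is first pretrained on its own denoising objective (local_update), then the whole stack is fine-tuned on labeled pairs (update). The sketch below shows the kind of SGD step such a softmax top layer performs on integer targets; it is standard softmax cross-entropy, not pylearn's actual update, which also backpropagates through the autoencoder layers.

import numpy as N

def nclass_logreg_sgd_step(x, y, w, b, lr=0.01):
    # One SGD step on mean negative log-likelihood with integer
    # targets. Generic sketch under assumed shapes: x (batch, n_in),
    # y (batch,) ints, w (n_in, n_out), b (n_out,).
    scores = N.dot(x, w) + b
    scores = scores - scores.max(axis=1)[:, None]
    p = N.exp(scores)
    p = p / p.sum(axis=1)[:, None]       # predicted class probabilities
    p[N.arange(len(y)), y] -= 1.0        # dL/dscores = p - onehot(y)
    w = w - lr * N.dot(x.T, p) / len(y)
    b = b - lr * p.mean(axis=0)
    return w, b

# Mirroring the two training examples and targets used in the test:
x = N.array([[0., 1., 0., 1.], [1., 0., 1., 0.]])
y = N.array([1, 0])
w, b = nclass_logreg_sgd_step(x, y, N.zeros((4, 10)), N.zeros(10))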