Mercurial > pylearn
diff algorithms/tests/test_daa.py @ 533:de974b4fc4ea
Bugfix in pylearn.embeddings.length()
author | Joseph Turian <turian@gmail.com> |
---|---|
date | Tue, 18 Nov 2008 03:25:54 -0500 |
parents | 4fb6f7320518 |
children |
line wrap: on
line diff
--- a/algorithms/tests/test_daa.py	Tue Nov 18 02:57:50 2008 -0500
+++ b/algorithms/tests/test_daa.py	Tue Nov 18 03:25:54 2008 -0500
@@ -28,7 +28,7 @@
             model.local_update[l]([[0, 1, 0, 1]])
             model.local_update[l]([[1, 0, 1, 0]])
-    for i in range(1):
+    for i in range(10):
         model.update([[0, 1, 0, 1]], [[1]])
         model.update([[1, 0, 1, 0]], [[0]])
     print model.classify([[0, 1, 0, 1]])
@@ -41,23 +41,31 @@
     daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'pred')],
                          regularize = False)
-    model = daa.make([4, 20, 20, 20, 10],
+    model = daa.make([4] + [20] * ndaa + [10],
                      lr = 0.01,
                      mode = mode,
                      seed = 10)
-    model.layers[0].noise_level = 0.3
-    model.layers[1].noise_level = 0.3
-    model.layers[2].noise_level = 0.3
+    for l in range(ndaa): model.layers[l].noise_level = 0.3
 
-    for l in range(3):
+    instances = [([[0, 1, 0, 1]], [1]), ([[1, 0, 1, 0]], [0])]
+
+    for l in range(ndaa):
         for i in range(10):
-            model.local_update[l]([[0, 1, 0, 1]])
-            model.local_update[l]([[1, 0, 1, 0]])
+            for (input, output) in instances:
+                model.local_update[l](input)
 
-    for i in range(1):
-        model.update([[0, 1, 0, 1]], [1])
-        model.update([[1, 0, 1, 0]], [0])
+    for i in range(10):
+        for (input, output) in instances:
+#            model.update(input, output)
+            print "OLD:",
+            print model.validate(input, output)
+            oldloss = model.update(input, output)
+            print oldloss
+            print "NEW:"
+            print model.validate(input, output)
+            print
+
     print model.apply([[0, 1, 0, 1]])
     print model.apply([[1, 0, 1, 0]])