Mercurial > pylearn
annotate pylearn/algorithms/tests/test_daa.py @ 1525:9c24a2bdbe90
A test is too slow when run in DebugMode, so we force fast_run mode in that case.
author | Frederic Bastien <nouiz@nouiz.org> |
---|---|
date | Fri, 09 Nov 2012 14:48:26 -0500 |
parents | 70b061f142b2 |
children |
rev | line source |
---|---|
493 | 1 #!/usr/bin/python |
2 | |
3 from pylearn import algorithms as models | |
4 import theano | |
5 import numpy | |
6 import time | |
7 | |
500 | 8 import pylearn.algorithms.logistic_regression |
905 | 9 from theano import config |
10 from pylearn.algorithms.stacker import Stacker | |
11 from pylearn.algorithms.daa import SigmoidXEDenoisingAA | |
12 from pylearn.algorithms.regressor import BinRegressor | |
def test_train_daa(mode = config.mode):
    """Train a small stacked denoising autoencoder with a binary regressor
    on top, then print its classification of the two training patterns.

    :param mode: theano compilation mode used for all compiled functions
                 (defaults to the globally-configured ``config.mode``).

    Smoke test only: nothing is asserted, failure shows up as an exception
    or (manually) as nonsensical printed classifications.
    """
    ndaa = 3
    daa = Stacker([(SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(BinRegressor, 'output')],
                  regularize = False)

    # 4 inputs, `ndaa` hidden layers of 20 units each, 1 binary output.
    model = daa.make([4] + [20] * ndaa + [1],
                     lr = 0.01,
                     mode = mode,
                     seed = 10)

    # Same corruption level for every denoising layer.
    for layer_idx in range(ndaa):
        model.layers[layer_idx].noise_level = 0.3

    # Greedy layer-wise pre-training: update each hidden layer in turn
    # on the two (complementary) training patterns.
    for layer_idx in range(ndaa):
        for i in range(10):
            model.local_update[layer_idx]([[0, 1, 0, 1]])
            model.local_update[layer_idx]([[1, 0, 1, 0]])

    # Supervised fine-tuning of the whole stack.
    for i in range(10):
        model.update([[0, 1, 0, 1]], [[1]])
        model.update([[1, 0, 1, 0]], [[0]])
    print(model.classify([[0, 1, 0, 1]]))
    print(model.classify([[1, 0, 1, 0]]))
500 | 41 def test_train_daa2(mode = theano.Mode('c|py', 'fast_run')): |
42 | |
43 ndaa = 3 | |
905 | 44 daa = Stacker([(SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'pred')], |
500 | 45 regularize = False) |
46 | |
533
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
47 model = daa.make([4] + [20] * ndaa + [10], |
500 | 48 lr = 0.01, |
49 mode = mode, | |
50 seed = 10) | |
51 | |
533
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
52 for l in range(ndaa): model.layers[l].noise_level = 0.3 |
500 | 53 |
533
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
54 instances = [([[0, 1, 0, 1]], [1]), ([[1, 0, 1, 0]], [0])] |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
55 |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
56 for l in range(ndaa): |
501
4fb6f7320518
N-class logistic regression top-layer works
Joseph Turian <turian@gmail.com>
parents:
500
diff
changeset
|
57 for i in range(10): |
533
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
58 for (input, output) in instances: |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
59 model.local_update[l](input) |
500 | 60 |
533
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
61 for i in range(10): |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
62 for (input, output) in instances: |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
63 # model.update(input, output) |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
64 print "OLD:", |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
65 print model.validate(input, output) |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
66 oldloss = model.update(input, output) |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
67 print oldloss |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
68 print "NEW:" |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
69 print model.validate(input, output) |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
70 print |
de974b4fc4ea
Bugfix in pylearn.embeddings.length()
Joseph Turian <turian@gmail.com>
parents:
501
diff
changeset
|
71 |
501
4fb6f7320518
N-class logistic regression top-layer works
Joseph Turian <turian@gmail.com>
parents:
500
diff
changeset
|
72 print model.apply([[0, 1, 0, 1]]) |
4fb6f7320518
N-class logistic regression top-layer works
Joseph Turian <turian@gmail.com>
parents:
500
diff
changeset
|
73 print model.apply([[1, 0, 1, 0]]) |
500 | 74 |
493 | 75 |
76 | |
77 | |
if __name__ == '__main__':
    # Alternative modes tried during earlier timing experiments, kept
    # here for reference:
    #   test_train_daa(theano.Mode('py', 'fast_compile'))
    #   test_train_daa(theano.Mode('c|py', 'fast_run'))
    #   test_train_daa(theano.Mode('c|py', 'fast_compile'))
    #   test_train_daa(theano.compile.Mode('c&py', 'merge'))
    #   test_train_daa(theano.compile.Mode('c|py', 'merge'))

    # Run both smoke tests with the modes that are known to work.
    test_train_daa(theano.compile.Mode('py', 'merge'))

    test_train_daa2(theano.compile.Mode('c|py', 'merge'))