pylearn: annotate algorithms/tests/test_daa.py @ 672:27b1344a57b1
changeset: "Added preprocessing back in"
author:    Joseph Turian <turian@gmail.com>
date:      Thu, 20 Nov 2008 06:38:06 -0500
parents:   de974b4fc4ea
children:  (none)
#!/usr/bin/python

from pylearn import algorithms as models
import theano
import numpy
import time

import pylearn.algorithms.logistic_regression

def test_train_daa(mode = theano.Mode('c|py', 'fast_run')):

    ndaa = 3
    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(models.BinRegressor, 'output')],
                         regularize = False)

    model = daa.make([4, 20, 20, 20, 1],
                     lr = 0.01,
                     mode = mode,
                     seed = 10)

    model.layers[0].noise_level = 0.3
    model.layers[1].noise_level = 0.3
    model.layers[2].noise_level = 0.3

    # Layer-wise pretraining: update each of the three hidden layers in turn
    for l in range(3):
        for i in range(10):
            model.local_update[l]([[0, 1, 0, 1]])
            model.local_update[l]([[1, 0, 1, 0]])

    for i in range(10):
        model.update([[0, 1, 0, 1]], [[1]])
        model.update([[1, 0, 1, 0]], [[0]])
    print model.classify([[0, 1, 0, 1]])
    print model.classify([[1, 0, 1, 0]])

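# test_train_daa2 below differs from test_train_daa in its top layer: it stacks
# an N-class logistic regression module (Module_Nclass) on the denoising
# autoencoders, so targets are class indices (e.g. [1], [0]) rather than the
# binary regressor's [[1]] / [[0]] targets.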
def test_train_daa2(mode = theano.Mode('c|py', 'fast_run')):

    ndaa = 3
    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'pred')],
                         regularize = False)

    model = daa.make([4] + [20] * ndaa + [10],
                     lr = 0.01,
                     mode = mode,
                     seed = 10)

    for l in range(ndaa): model.layers[l].noise_level = 0.3

    instances = [([[0, 1, 0, 1]], [1]), ([[1, 0, 1, 0]], [0])]

    for l in range(ndaa):
        for i in range(10):
            for (input, output) in instances:
                model.local_update[l](input)

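    # Supervised phase: for each instance, print the validation loss before the
    # update ("OLD"), apply model.update (its return value is stored as oldloss
    # and printed), then print the validation loss again ("NEW").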
    for i in range(10):
        for (input, output) in instances:
            # model.update(input, output)
            print "OLD:",
            print model.validate(input, output)
            oldloss = model.update(input, output)
            print oldloss
            print "NEW:"
            print model.validate(input, output)
            print

    print model.apply([[0, 1, 0, 1]])
    print model.apply([[1, 0, 1, 0]])


if __name__ == '__main__':
#    print 'optimized:'
#    t1 = test_train_daa(theano.Mode('py', 'fast_compile'))
#    t1 = test_train_daa(theano.Mode('c|py', 'fast_run'))
#    print 'time:',t1
#    print

#    print 'not optimized:'
#    t2 = test_train_daa(theano.Mode('c|py', 'fast_compile'))
##    print 'time:',t2

#    test_train_daa(theano.compile.Mode('c&py', 'merge'))
#    test_train_daa(theano.compile.Mode('c|py', 'merge'))
    test_train_daa(theano.compile.Mode('py', 'merge'))

    test_train_daa2(theano.compile.Mode('c|py', 'merge'))
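
Usage note (a sketch, not part of the file): both test functions accept a Theano
mode argument, so a quick Python-only run could reuse the mode shown in the
commented-out examples above, for instance:

    test_train_daa(theano.Mode('py', 'fast_compile'))
    test_train_daa2(theano.Mode('py', 'fast_compile'))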