annotate algorithms/tests/test_daa.py @ 533:de974b4fc4ea

Bugfix in pylearn.embeddings.length()
author Joseph Turian <turian@gmail.com>
date Tue, 18 Nov 2008 03:25:54 -0500
parents 4fb6f7320518
children
#!/usr/bin/python

from pylearn import algorithms as models
import theano
import numpy
import time

import pylearn.algorithms.logistic_regression

def test_train_daa(mode = theano.Mode('c|py', 'fast_run')):

    # Stack of sigmoid cross-entropy denoising autoencoder layers with a
    # binary regressor output layer.
    ndaa = 3
    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(models.BinRegressor, 'output')],
                         regularize = False)

    model = daa.make([4, 20, 20, 20, 1],
                     lr = 0.01,
                     mode = mode,
                     seed = 10)

    model.layers[0].noise_level = 0.3
    model.layers[1].noise_level = 0.3
    model.layers[2].noise_level = 0.3

    # Greedy layer-wise pretraining: update each hidden layer in turn
    for l in range(3):
        for i in range(10):
            model.local_update[l]([[0, 1, 0, 1]])
            model.local_update[l]([[1, 0, 1, 0]])

    # Supervised updates of the whole stack, then classify both patterns
    for i in range(10):
        model.update([[0, 1, 0, 1]], [[1]])
        model.update([[1, 0, 1, 0]], [[0]])
    print model.classify([[0, 1, 0, 1]])
    print model.classify([[1, 0, 1, 0]])

def test_train_daa2(mode = theano.Mode('c|py', 'fast_run')):

    # Same stack of denoising autoencoders, but with an N-class logistic
    # regression output layer.
    ndaa = 3
    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'pred')],
                         regularize = False)

    model = daa.make([4] + [20] * ndaa + [10],
                     lr = 0.01,
                     mode = mode,
                     seed = 10)

    for l in range(ndaa): model.layers[l].noise_level = 0.3

    instances = [([[0, 1, 0, 1]], [1]), ([[1, 0, 1, 0]], [0])]

    # Greedy layer-wise pretraining
    for l in range(ndaa):
        for i in range(10):
            for (input, output) in instances:
                model.local_update[l](input)

    # Supervised fine-tuning: print the validation loss before and after each update
    for i in range(10):
        for (input, output) in instances:
            # model.update(input, output)
            print "OLD:",
            print model.validate(input, output)
            oldloss = model.update(input, output)
            print oldloss
            print "NEW:"
            print model.validate(input, output)
            print

    print model.apply([[0, 1, 0, 1]])
    print model.apply([[1, 0, 1, 0]])


if __name__ == '__main__':
    # print 'optimized:'
    # t1 = test_train_daa(theano.Mode('py', 'fast_compile'))
    # t1 = test_train_daa(theano.Mode('c|py', 'fast_run'))
    # print 'time:',t1
    # print

    # print 'not optimized:'
    # t2 = test_train_daa(theano.Mode('c|py', 'fast_compile'))
    ## print 'time:',t2

    # test_train_daa(theano.compile.Mode('c&py', 'merge'))
    # test_train_daa(theano.compile.Mode('c|py', 'merge'))
    test_train_daa(theano.compile.Mode('py', 'merge'))

    test_train_daa2(theano.compile.Mode('c|py', 'merge'))