annotate pylearn/shared/layers/tests/test_kouh2008.py @ 1447:fbe470217937

Use .get_value() and .set_value() of shared instead of the .value property
author Pascal Lamblin <lamblinp@iro.umontreal.ca>
date Wed, 16 Mar 2011 20:20:02 -0400
parents c635d1df51a1
children
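"""Tests for the Kouh2008 layer in pylearn.shared.layers.

The tests below check that the layer's output follows the requested dtype,
and that gradient descent on its parameters (with a LogisticRegression
output layer stacked on top) decreases the negative log-likelihood on a
fixed random batch.
"""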
import numpy
import theano.compile.debugmode
from theano.compile.debugmode import DebugMode
from theano import tensor
from theano.compile import pfunc
from pylearn.shared.layers import LogisticRegression, Kouh2008

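# test_dtype: the symbolic output of new_filters_expbounds should carry the
# dtype requested at construction time.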
def test_dtype():
    n_in = 10
    n_out = 10
    n_terms = 3
    rng = numpy.random.RandomState(23455)
    layer = Kouh2008.new_filters_expbounds(rng, tensor.dmatrix(), n_in, n_out, n_terms, dtype='float64')
    assert layer.output.dtype == 'float64'
    layer = Kouh2008.new_filters_expbounds(rng, tensor.fmatrix(), n_in, n_out, n_terms, dtype='float32')
    assert layer.output.dtype == 'float32'

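# run_w_random: stack LogisticRegression on top of a Kouh2008 layer and run
# plain gradient descent (step 0.001) on the output-layer parameters plus a
# single Kouh2008 parameter, asserting that the summed NLL on a fixed random
# batch decreases at every iteration.  Returns the final cost value.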
def run_w_random(bsize=10, n_iter=200, n_in=1024, n_out=100, n_terms=2, dtype='float64'):
    if isinstance(theano.compile.mode.get_default_mode(), DebugMode):
        n_iter = 2

    x = tensor.dmatrix()
    y = tensor.lvector()
    rng = numpy.random.RandomState(23455)

    layer = Kouh2008.new_filters_expbounds(rng, x, n_in, n_out, n_terms, dtype='float64')
    out = LogisticRegression.new(layer.output, n_out, 2)
    cost = out.nll(y).sum()

    # isolated optimization
    for ii in xrange(len(layer.params)):
        params = out.params + [layer.params[ii]]
        print 'PARAMS', params
        updates = [(p, p - numpy.asarray(0.001, dtype=dtype)*gp) for p, gp in zip(params, tensor.grad(cost, params))]
        print 'COMPILING'
        f = pfunc([x, y], cost, updates=updates)
        print 'DONE'
        if False:
            for i, n in enumerate(f.maker.env.toposort()):
                print i, n

        xval = numpy.asarray(rng.rand(bsize, n_in), dtype=dtype)
        yval = numpy.asarray(rng.randint(0, 2, bsize), dtype='int64')
        f0 = f(xval, yval)
        for i in xrange(n_iter):
            fN = f(xval, yval)
            assert fN < f0
            f0 = fN
            #if 0 == i % 5: print i, 'rval', fN

        return fN

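# test_A: joint gradient descent on all Kouh2008 parameters except the last
# two, after perturbing the LogisticRegression weights.  It also inspects the
# compiled graph to check that the shared variable 'r' appears both as an
# input and in the update outputs, that no two parameters share memory, and
# that the cost decreases from its initial value (> 6).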
def test_A(bsize=10, n_iter=2, n_in=10, n_out=10, n_terms=2, dtype='float64'):

    x = tensor.dmatrix()
    y = tensor.lvector()
    rng = numpy.random.RandomState(23455)

    layer = Kouh2008.new_filters_expbounds(rng, x, n_in, n_out, n_terms, dtype='float64')
    out = LogisticRegression.new(layer.output, n_out, 2)
    cost = out.nll(y).sum()
    # joint optimization except for one of the linear filters
    out.w.set_value((out.w.get_value(borrow=True) +
                     0.1 * rng.rand(*out.w.get_value(borrow=True).shape)),
                    borrow=True)
    params = layer.params[:-2]
    mode = None
    updates = [(p, p - numpy.asarray(0.001, dtype=dtype)*gp) for p, gp in zip(params, tensor.grad(cost, params))]
    for p, newp in updates:
        if p is layer.r:
            theano.compile.debugmode.debugprint(newp, depth=5)
    f = pfunc([x, y], [cost], mode, updates=updates)
    env_r = f.maker.env.inputs[9]
    order = f.maker.env.toposort()

    assert str(f.maker.env.outputs[6].owner.inputs[0]) == 'r'
    assert str(f.maker.env.inputs[9]) == 'r'
    assert f.maker.env.outputs[6].owner.inputs[0] is env_r
    assert (f.maker.env.outputs[6].owner, 0) in env_r.clients

    if False:
        for i, n in enumerate(f.maker.env.toposort()):
            print i, n, n.inputs

    xval = numpy.asarray(rng.rand(bsize, n_in), dtype=dtype)
    yval = numpy.asarray(rng.randint(0, 2, bsize), dtype='int64')
    for i in xrange(n_iter):
        fN = f(xval, yval)
        if 0 == i:
            f0 = fN
        #if 0 == i % 5: print i, 'rval', fN
        print i, 'rval', fN
        for p0 in params:
            for p1 in params:
                assert p0 is p1 or not numpy.may_share_memory(p0.get_value(borrow=True), p1.get_value(borrow=True))
        assert not numpy.may_share_memory(layer.r.get_value(borrow=True), xval)
        print 'XVAL SUM', xval.sum(), layer.r.get_value(borrow=True).sum()

    assert f0 > 6
    assert fN < f0  # TODO: assert more improvement

if __name__ == '__main__':
    test_A()

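# The tests below exercise run_w_random at different sizes; the numeric
# thresholds are skipped under DebugMode, where only 2 iterations are run.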
def test_smaller():
    rval = run_w_random(n_in=10, n_out=8)
    if not isinstance(theano.compile.mode.get_default_mode(), DebugMode):
        assert rval < 6.1

def test_smaller32():
    rval = run_w_random(n_in=10, n_out=8, dtype='float32')
    if not isinstance(theano.compile.mode.get_default_mode(), DebugMode):
        assert rval < 6.1

def test_big():
    rval = run_w_random()
    if not isinstance(theano.compile.mode.get_default_mode(), DebugMode):
        assert rval < 0.1