# Provenance: pylearn/shared/layers/tests/test_kouh2008.py
# Mercurial rev 1447:fbe470217937 (parent 1378:c635d1df51a1)
# "Use .get_value() and .set_value() of shared instead of the .value property"
# Author: Pascal Lamblin <lamblinp@iro.umontreal.ca>
# Date:   Wed, 16 Mar 2011 20:20:02 -0400
import numpy

import theano.compile.debugmode
from theano.compile.debugmode import DebugMode
from theano import tensor
from theano.compile import pfunc

from pylearn.shared.layers import LogisticRegression, Kouh2008
7 | |
def test_dtype():
    """new_filters_expbounds must propagate the requested dtype to the layer output."""
    n_in = 10
    n_out = 10
    n_terms = 3
    rng = numpy.random.RandomState(23455)
    layer = Kouh2008.new_filters_expbounds(rng, tensor.dmatrix(), n_in, n_out, n_terms, dtype='float64')
    assert layer.output.dtype == 'float64'
    layer = Kouh2008.new_filters_expbounds(rng, tensor.fmatrix(), n_in, n_out, n_terms, dtype='float32')
    assert layer.output.dtype == 'float32'
17 | |
18 def run_w_random(bsize=10, n_iter=200, n_in = 1024, n_out = 100, n_terms=2, dtype='float64'): | |
927
ffaf94da8100
make test faster in debug mode.
Frederic Bastien <nouiz@nouiz.org>
parents:
907
diff
changeset
|
19 if isinstance(theano.compile.mode.get_default_mode(),DebugMode): |
ffaf94da8100
make test faster in debug mode.
Frederic Bastien <nouiz@nouiz.org>
parents:
907
diff
changeset
|
20 n_iter=2 |
ffaf94da8100
make test faster in debug mode.
Frederic Bastien <nouiz@nouiz.org>
parents:
907
diff
changeset
|
21 |
834 | 22 x = tensor.dmatrix() |
23 y = tensor.lvector() | |
24 rng = numpy.random.RandomState(23455) | |
25 | |
907
6d4f98f86514
fix import and fix method name change. This uncover other change not reflected into the test.
Frederic Bastien <nouiz@nouiz.org>
parents:
893
diff
changeset
|
26 layer = Kouh2008.new_filters_expbounds(rng, x, n_in, n_out, n_terms, dtype='float64') |
834 | 27 out = LogisticRegression.new(layer.output, n_out, 2) |
28 cost = out.nll(y).sum() | |
29 | |
30 #isolated optimization | |
31 for ii in xrange(len(layer.params)): | |
32 params = out.params+ [layer.params[ii]] | |
33 print 'PARAMS', params | |
34 updates = [(p, p - numpy.asarray(0.001, dtype=dtype)*gp) for p,gp in zip(params, tensor.grad(cost, params)) ] | |
35 print 'COMPILING' | |
36 f = pfunc([x, y], cost, updates=updates) | |
37 print 'DONE' | |
38 if False: | |
39 for i, n in enumerate(f.maker.env.toposort()): | |
40 print i, n | |
41 | |
42 xval = numpy.asarray(rng.rand(bsize, n_in), dtype=dtype) | |
43 yval = numpy.asarray(rng.randint(0,2,bsize), dtype='int64') | |
44 f0 = f(xval, yval) | |
45 for i in xrange(n_iter): | |
46 fN = f(xval, yval) | |
47 assert fN < f0 | |
48 f0 = fN | |
1378
c635d1df51a1
make failing test less verbose.
Frederic Bastien <nouiz@nouiz.org>
parents:
927
diff
changeset
|
49 #if 0 == i % 5: print i, 'rval', fN |
834 | 50 |
51 return fN | |
52 | |
53 def test_A(bsize=10, n_iter=2, n_in = 10, n_out = 10, n_terms=2, dtype='float64'): | |
54 | |
55 x = tensor.dmatrix() | |
56 y = tensor.lvector() | |
57 rng = numpy.random.RandomState(23455) | |
58 | |
907
6d4f98f86514
fix import and fix method name change. This uncover other change not reflected into the test.
Frederic Bastien <nouiz@nouiz.org>
parents:
893
diff
changeset
|
59 layer = Kouh2008.new_filters_expbounds(rng, x, n_in, n_out, n_terms, dtype='float64') |
834 | 60 out = LogisticRegression.new(layer.output, n_out, 2) |
61 cost = out.nll(y).sum() | |
62 #joint optimization except for one of the linear filters | |
1447
fbe470217937
Use .get_value() and .set_value() of shared instead of the .value property
Pascal Lamblin <lamblinp@iro.umontreal.ca>
parents:
1378
diff
changeset
|
63 out.w.set_value((out.w.get_value(borrow=True) + |
fbe470217937
Use .get_value() and .set_value() of shared instead of the .value property
Pascal Lamblin <lamblinp@iro.umontreal.ca>
parents:
1378
diff
changeset
|
64 0.1 * rng.rand(*out.w.get_value(borrow=True).shape)), |
fbe470217937
Use .get_value() and .set_value() of shared instead of the .value property
Pascal Lamblin <lamblinp@iro.umontreal.ca>
parents:
1378
diff
changeset
|
65 borrow=True) |
834 | 66 params = layer.params[:-2] |
67 mode = None | |
68 updates = [(p, p - numpy.asarray(0.001, dtype=dtype)*gp) for p,gp in zip(params, tensor.grad(cost, params)) ] | |
69 for p, newp in updates: | |
70 if p is layer.r: | |
71 theano.compile.debugmode.debugprint(newp, depth=5) | |
72 f = pfunc([x, y], [cost], mode, updates=updates) | |
73 env_r = f.maker.env.inputs[9] | |
74 order = f.maker.env.toposort() | |
75 | |
76 assert str(f.maker.env.outputs[6].owner.inputs[0]) == 'r' | |
77 assert str(f.maker.env.inputs[9]) == 'r' | |
78 assert f.maker.env.outputs[6].owner.inputs[0] is env_r | |
79 assert (f.maker.env.outputs[6].owner,0) in env_r.clients | |
80 | |
81 if False: | |
82 for i, n in enumerate(f.maker.env.toposort()): | |
83 print i, n, n.inputs | |
84 | |
85 xval = numpy.asarray(rng.rand(bsize, n_in), dtype=dtype) | |
86 yval = numpy.asarray(rng.randint(0,2,bsize), dtype='int64') | |
87 for i in xrange(n_iter): | |
88 fN = f(xval, yval) | |
89 if 0 == i: | |
90 f0 = fN | |
91 #if 0 == i % 5: print i, 'rval', fN | |
92 print i, 'rval', fN | |
93 for p0 in params: | |
94 for p1 in params: | |
95 assert p0 is p1 or not numpy.may_share_memory(p0.value, p1.value) | |
96 assert not numpy.may_share_memory(layer.r.value, xval) | |
97 print 'XVAL SUM', xval.sum(), layer.r.value.sum() | |
98 | |
99 assert f0 > 6 | |
100 assert fN < f0 # TODO: assert more improvement | |
101 | |
# NOTE(review): this guard sits mid-file (before test_smaller/test_big),
# so running as a script exercises only test_A.
if __name__ == '__main__':
    test_A()
104 | |
def test_smaller():
    """Small-dimension training run; check the final cost only outside DebugMode
    (DebugMode runs too few iterations for the bound to be meaningful)."""
    rval = run_w_random(n_in=10, n_out=8)
    if not isinstance(theano.compile.mode.get_default_mode(), DebugMode):
        assert rval < 6.1
834 | 109 |
def test_smaller32():
    """Same as test_smaller but with float32 data/updates; cost bound is
    skipped in DebugMode (too few iterations)."""
    rval = run_w_random(n_in=10, n_out=8, dtype='float32')
    if not isinstance(theano.compile.mode.get_default_mode(), DebugMode):
        assert rval < 6.1
834 | 114 |
def test_big():
    """Full-size training run with default dimensions; expects near-zero
    final cost, skipped in DebugMode (too few iterations)."""
    rval = run_w_random()
    if not isinstance(theano.compile.mode.get_default_mode(), DebugMode):
        assert rval < 0.1