comparison pylearn/shared/layers/tests/test_kouh2008.py @ 1447:fbe470217937

Use .get_value() and .set_value() of shared instead of the .value property
author Pascal Lamblin <lamblinp@iro.umontreal.ca>
date Wed, 16 Mar 2011 20:20:02 -0400
parents c635d1df51a1
children
comparing 1446:6e50d209b5f1 with 1447:fbe470217937
@@ -58,11 +58,13 @@
 
     layer = Kouh2008.new_filters_expbounds(rng, x, n_in, n_out, n_terms, dtype='float64')
     out = LogisticRegression.new(layer.output, n_out, 2)
     cost = out.nll(y).sum()
     #joint optimization except for one of the linear filters
-    out.w.value += 0.1 * rng.rand(*out.w.value.shape)
+    out.w.set_value((out.w.get_value(borrow=True) +
+                     0.1 * rng.rand(*out.w.get_value(borrow=True).shape)),
+                    borrow=True)
     params = layer.params[:-2]
     mode = None
     updates = [(p, p - numpy.asarray(0.001, dtype=dtype)*gp) for p,gp in zip(params, tensor.grad(cost, params)) ]
     for p, newp in updates:
         if p is layer.r:
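For context, a minimal standalone sketch of the API migration this patch applies: Theano shared variables are updated through explicit get_value()/set_value() accessors rather than the deprecated .value property, with borrow=True skipping the defensive copy on read and write. The shared variable w, its shape, and the RNG seed below are illustrative only, not taken from the test:

    import numpy
    import theano

    rng = numpy.random.RandomState(23)          # hypothetical RNG
    w = theano.shared(numpy.zeros((4, 2), dtype='float64'))  # hypothetical shared var

    # Deprecated style, as in the old test code:
    #   w.value += 0.1 * rng.rand(*w.value.shape)

    # Current style: read via get_value(), write via set_value();
    # borrow=True avoids copying the underlying ndarray.
    w.set_value(w.get_value(borrow=True)
                + 0.1 * rng.rand(*w.get_value(borrow=True).shape),
                borrow=True)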