changeset 691:e69249897f89
added DynamicNormalizer to exponential_mean
author:    James Bergstra <bergstrj@iro.umontreal.ca>
date:      Thu, 14 May 2009 17:00:10 -0400
parents:   7d8bb6d087bc
children:  5ca1a8e859db
files:     pylearn/algorithms/exponential_mean.py
           pylearn/algorithms/tests/test_exponential_mean.py
diffstat:  2 files changed, 69 insertions(+), 0 deletions(-)
--- a/pylearn/algorithms/exponential_mean.py	Thu May 14 16:59:20 2009 -0400
+++ b/pylearn/algorithms/exponential_mean.py	Thu May 14 17:00:10 2009 -0400
@@ -141,3 +141,25 @@
         obj.mean.initialize()
         obj.mean_sqr.initialize()
 
+class DynamicNormalizer(theano.Module):
+    """
+    Normalizes `input` using geometric-decaying estimates of the mean and variance.  The
+    `output` should have mean near zero, and variance near 1.
+    """
+    def __init__(self, input, input_shape, max_denom=100, eps=1.0e-8):
+        super(DynamicNormalizer, self).__init__()
+        self.input = input
+        self.d_mean = exp_mean(input, input_shape, max_denom=max_denom)
+        self.d_var = exp_var(input, input_shape, max_denom=max_denom)
+        self.output = (input - self.d_mean.curval) / theano.tensor.sqrt(self.d_var.curval + eps)
+
+    def updates(self):
+        rval = {}
+        rval.update(self.d_mean.updates())
+        rval.update(self.d_var.updates())
+        return rval
+
+    def _instance_initialize(self, obj):
+        obj.d_mean.initialize()
+        obj.d_var.initialize()
+
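The class above is written against the long-deprecated theano.Module / theano.Method framework, so it will not run on a modern Theano or PyTensor install. As a framework-free illustration of the computation, here is a small NumPy sketch. It assumes that exp_mean and exp_var keep running estimates whose update denominator grows with the number of rows seen and saturates at max_denom, and that the variance is tracked through a mean / mean-of-squares pair, as the surrounding exponential_mean.py code suggests; the helper name dynamic_normalize and the zero initialization are illustrative, not part of the commit.

```python
import numpy as np

def dynamic_normalize(rows, max_denom=100, eps=1.0e-8):
    """Stream `rows` through a running-mean/variance normalizer.

    Rough NumPy analogue of DynamicNormalizer.output:
        (input - d_mean.curval) / sqrt(d_var.curval + eps)
    Each row is normalized with the estimates as they stood *before*
    that row is folded in, mirroring how a theano.Method computes its
    outputs from the pre-update values of the module state.
    """
    n_cols = rows.shape[1]
    mean = np.zeros(n_cols)       # running estimate of E[x]
    mean_sqr = np.zeros(n_cols)   # running estimate of E[x**2]
    out = np.empty_like(rows, dtype=float)
    for i, x in enumerate(rows):
        var = mean_sqr - mean ** 2
        out[i] = (x - mean) / np.sqrt(var + eps)
        # geometric-decay update: the denominator grows until it
        # saturates at max_denom (assumed exp_mean/exp_var behaviour)
        denom = min(i + 1, max_denom)
        mean += (x - mean) / denom
        mean_sqr += (x ** 2 - mean_sqr) / denom
    return out
```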
--- a/pylearn/algorithms/tests/test_exponential_mean.py	Thu May 14 16:59:20 2009 -0400
+++ b/pylearn/algorithms/tests/test_exponential_mean.py	Thu May 14 17:00:10 2009 -0400
@@ -48,3 +48,50 @@
     assert not numpy.allclose(dmean, nmean)
 
     assert i > rows_to_test
+
+def test_dynamic_normalizer():
+    x = theano.tensor.dvector()
+
+    rows_to_test = 100
+    cols=2
+
+    D = exponential_mean.DynamicNormalizer(x, (cols,), rows_to_test)
+
+    M = theano.Module()
+    M.dn = D
+    M.dn_mean = exponential_mean.exp_mean(D.output, (cols,), 50)
+    M.dn_var = exponential_mean.exp_var(D.output, (cols,), 50)
+    M.x_mean = exponential_mean.exp_mean(x, (cols,), 10)
+
+    updates = D.updates()
+    #print len(updates)
+    updates.update(M.dn_mean.updates())
+    #print len(updates)
+    updates.update(M.dn_var.updates())
+    #print len(updates)
+    updates.update(M.x_mean.updates())
+    #print len(updates)
+
+
+
+    M.f = theano.Method([x], [D.output, M.dn_mean.curval, M.dn_var.curval, M.x_mean.curval] , updates)
+
+    m = M.make()
+    m.dn.initialize()
+    m.dn_mean.initialize()
+    m.dn_var.initialize()
+    m.x_mean.initialize()
+
+
+    rng = numpy.random.RandomState(3284)
+    xval = rng.rand(rows_to_test+100,cols)
+
+    for i, xrow in enumerate(xval):
+        n_x = m.f(xrow)
+
+        #print n_x
+
+    assert numpy.all(numpy.abs(n_x[1]) < 0.15) # the means should be close to 0
+    assert numpy.all(numpy.abs(n_x[2]-1) < 0.07) # the variance should be close to 1.0
+    assert i > rows_to_test
+
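A rough analogue of this test can be run against the dynamic_normalize sketch above: feed uniform random rows through it and check that, once the warm-up rows are discarded, the normalized stream has mean near 0 and variance near 1. The tolerances below are deliberately looser than the commit's, since the sketch omits the downstream exp_mean/exp_var smoothing that the Theano test relies on.

```python
# Uses numpy as np and dynamic_normalize from the sketch above.
rng = np.random.RandomState(3284)
xval = rng.rand(2000, 2)            # uniform rows, 2 columns

normalized = dynamic_normalize(xval, max_denom=100)
tail = normalized[1000:]            # discard warm-up rows (cf. `i > rows_to_test`)

assert np.all(np.abs(tail.mean(axis=0)) < 0.2)       # means close to 0
assert np.all(np.abs(tail.var(axis=0) - 1.0) < 0.3)  # variances close to 1
```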