# HG changeset patch
# User James Bergstra
# Date 1238699322 14400
# Node ID 273d782b5a204b8a0ea7046e14b0e8e859ece0ac
# Parent  fec0ba6f8c8f86e1390635ea643c96721af6bee6
# added exponential_mean
#
# NOTE(review): this patch was whitespace-mangled (the unified diff was
# collapsed onto a few physical lines).  Reconstructed below as readable
# source.  The patch adds two new files:
#   pylearn/algorithms/exponential_mean.py
#   pylearn/algorithms/tests/test_exponential_mean.py

# --- pylearn/algorithms/exponential_mean.py ---------------------------------

"""Modules for maintaining statistics based on exponential decay"""
__docformat__ = "restructuredtext en"

import copy

import numpy
import theano
import theano.tensor


class ExponentialMean(theano.Module):
    """Maintain an exponentially-decaying estimate of the mean.

    This module computes the exact mean of the first `max_denom` values of
    `x`.  After the first `max_denom` values, it tracks the mean using the
    formula:

    :math:`self.curval = (1.0 - (1.0/max_denom)) * self.old_curval + (1.0/max_denom) * x`
    """

    # The average will be updated as if the current estimated average was
    # estimated from at most `max_denom - 1` values.
    max_denom = None

    # The initial value for the estimated average.
    ival = None

    def __init__(self, x, max_denom, ival):
        """
        :param x: track the mean of this Variable

        :param max_denom: see `self.max_denom`

        :param ival: This should be a tensor of zeros with a shape that
            matches `x`'s runtime value.
        """
        super(ExponentialMean, self).__init__()

        self.max_denom = max_denom
        self.ival = ival

        # Pick a symbolic type whose rank matches `ival`.  Ranks 0-2 use the
        # standard double-precision shortcuts; higher ranks build a
        # TensorType directly (completes the original TODO, which raised
        # NotImplementedError for rank > 2).
        rank = len(ival.shape)
        if rank == 0:
            x_type = theano.tensor.dscalar
        elif rank == 1:
            x_type = theano.tensor.dvector
        elif rank == 2:
            x_type = theano.tensor.dmatrix
        else:
            x_type = theano.tensor.TensorType('float64', (False,) * rank)

        # State: the previous estimate and the (saturating) sample counter.
        self.old_curval = x_type()
        self.denom = theano.tensor.lscalar()

        # While denom < max_denom this is the exact running mean; once denom
        # saturates it becomes an exponential moving average.
        alpha = 1.0 / self.denom
        self.curval = (1.0 - alpha) * self.old_curval + alpha * x

    def updates(self):
        """
        :returns: the symbolic updates necessary to refresh the mean estimate
        :rtype: dict Variable -> Variable
        """
        return {
            self.old_curval: self.curval,
            # denom counts how many samples the estimate is based on,
            # saturating at max_denom.
            self.denom: theano.tensor.smallest(self.denom + 1, self.max_denom),
        }

    def _instance_initialize(self, obj):
        # denom starts at 1 so the first update returns exactly the first x
        # (alpha == 1.0).  Copy ival so instances never share state with the
        # module-level initial value.
        obj.denom = 1
        obj.old_curval = copy.copy(self.ival)


def exp_mean(x, x_shape, max_denom=100):
    """Return an `ExponentialMean` to track a Variable `x` with given shape.

    :type x: Variable
    :type x_shape: tuple
    :type max_denom: int

    :rtype: ExponentialMean instance
    """
    return ExponentialMean(x,
            max_denom=max_denom,
            ival=numpy.zeros(x_shape))


def exp_mean_sqr(x, x_shape, max_denom=100):
    """Return an `ExponentialMean` to track a Variable `x`'s square.

    :type x: Variable
    :type x_shape: tuple
    :type max_denom: int

    :rtype: ExponentialMean instance
    """
    return ExponentialMean(x ** 2,
            max_denom=max_denom,
            ival=numpy.zeros(x_shape))


class exp_var(theano.Module):
    """Module with interface similar to `ExponentialMean` for tracking
    elementwise variance.

    Uses the identity Var[x] = E[x^2] - E[x]^2 (population variance,
    matching ``numpy.var``'s default ddof=0); it is exact only while both
    underlying means are exact, i.e. for the first `max_denom` samples.
    """

    # `ExponentialMean`: current estimate of the mean of `x`.
    mean = None

    # `ExponentialMean`: current estimate of the mean square of `x`.
    mean_sqr = None

    # Variable: current estimate of the variance of `x`.
    curval = None

    def __init__(self, x, x_shape, max_denom=100):
        """
        :param x: track the variance of this Variable

        :param x_shape: the runtime shape of `x` (tuple)

        :param max_denom: see `ExponentialMean.max_denom`
        """
        super(exp_var, self).__init__()

        self.mean = exp_mean(x, x_shape, max_denom)
        self.mean_sqr = exp_mean_sqr(x, x_shape, max_denom)

        self.curval = self.mean_sqr.curval - self.mean.curval ** 2

    def updates(self):
        """
        :returns: the symbolic updates necessary to refresh the variance
            estimate (union of the two sub-modules' updates)
        :rtype: dict Variable -> Variable
        """
        rval = {}
        rval.update(self.mean.updates())
        rval.update(self.mean_sqr.updates())
        return rval

    def _instance_initialize(self, obj):
        obj.mean.initialize()
        obj.mean_sqr.initialize()


# --- pylearn/algorithms/tests/test_exponential_mean.py ----------------------

import theano, numpy
from pylearn.algorithms import exponential_mean


def test_mean():
    """The tracker matches the exact mean for the first `rows_to_test`
    samples, then (deliberately) diverges once the denominator saturates."""
    x = theano.tensor.dvector()

    rows_to_test = 10

    D = exponential_mean.exp_mean(x, (4,), rows_to_test)

    D.f = theano.Method([x], D.curval, D.updates())
    d = D.make()

    rng = numpy.random.RandomState(3284)
    xval = rng.randn(rows_to_test + 3, 4)

    for i, xrow in enumerate(xval):
        dmean = d.f(xrow)
        nmean = numpy.mean(xval[:i + 1], axis=0)

        if i < rows_to_test:
            assert numpy.allclose(dmean, nmean)
        else:
            # past max_denom the estimate is exponentially weighted, so it
            # should no longer equal the exact mean
            assert not numpy.allclose(dmean, nmean)

    # sanity check: the loop actually exercised the post-saturation branch
    assert i > rows_to_test


def test_var():
    """Same structure as test_mean, against numpy.var (ddof=0)."""
    x = theano.tensor.dvector()

    rows_to_test = 10

    D = exponential_mean.exp_var(x, (4,), rows_to_test)

    D.f = theano.Method([x], D.curval, D.updates())
    d = D.make()

    rng = numpy.random.RandomState(3284)
    xval = rng.randn(rows_to_test + 3, 4)

    for i, xrow in enumerate(xval):
        dmean = d.f(xrow)
        nmean = numpy.var(xval[:i + 1], axis=0)

        if i < rows_to_test:
            assert numpy.allclose(dmean, nmean)
        else:
            assert not numpy.allclose(dmean, nmean)

    assert i > rows_to_test