view algorithms/sgd.py @ 496:f13847478c6d

A few more ideas, in comments
author Joseph Turian <turian@gmail.com>
date Tue, 28 Oct 2008 12:09:49 -0400
parents fbfd3932fd00


from theano.compile import module
from theano import tensor as T

class StochasticGradientDescent(module.FancyModule):
    """Plain stochastic gradient descent: update each parameter as p - lr * g."""

    def __init__(self, params, gparams, lr=None):
        super(StochasticGradientDescent, self).__init__()

        # Use the learning rate provided, or expose a scalar Member so the
        # rate can be supplied when the module is compiled.
        self.lr = lr if lr is not None else module.Member(T.dscalar())
        self.params = params    # parameters to update
        self.gparams = gparams  # gradients, one per parameter

        # One SGD step: move each parameter against its gradient,
        # scaled by the learning rate.
        self.updates = dict((p, p - self.lr * g)
                            for p, g in zip(self.params, self.gparams))
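
For reference, the same p - lr * g update can also be written against the later theano.shared / theano.function interface instead of the module system used above. A minimal sketch follows; the squared-error cost and the names x, y, w, b, and train are illustrative assumptions, not anything taken from this file.

# Minimal sketch (not part of this file): the same p - lr * g update,
# expressed with the later theano.shared / theano.function interface.
import numpy
import theano
import theano.tensor as T

x = T.dvector('x')                          # one input example
y = T.dscalar('y')                          # its target
w = theano.shared(numpy.zeros(3), name='w') # parameters to learn
b = theano.shared(0.0, name='b')

cost = (T.dot(x, w) + b - y) ** 2           # squared error on this example
params = [w, b]
gparams = T.grad(cost, params)              # one gradient per parameter

lr = 0.01
updates = [(p, p - lr * g) for p, g in zip(params, gparams)]

# Each call performs one stochastic gradient step on w and b in place.
train = theano.function([x, y], cost, updates=updates)

Each call train(x_value, y_value) applies one update to w and b and returns the cost for that example.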