# HG changeset patch
# User James Bergstra
# Date 1243988403 14400
# Node ID 88839ba37b97322db7f9cfc4724ea8dc09b46d97
# Parent  b4aa46f856c1bc47d9b4cb0d653df4547211e400
added methods kwarg to sgd module

diff -r b4aa46f856c1 -r 88839ba37b97 pylearn/algorithms/sgd.py
--- a/pylearn/algorithms/sgd.py	Tue Jun 02 20:19:30 2009 -0400
+++ b/pylearn/algorithms/sgd.py	Tue Jun 02 20:20:03 2009 -0400
@@ -4,10 +4,16 @@
 import theano
 
 class StochasticGradientDescent(theano.Module):
-    """Fixed stepsize gradient descent"""
+    """Fixed stepsize gradient descent
+
+    Methods for gradient descent are:
+    - step(arg_vals), which returns None and updates the params
+    - step_cost(arg_vals), which returns the cost value and updates the params
+
+    """
     def __init__(self, args, cost, params,
                  gradients=None, stepsize=None,
-                 updates=None, auxout=None):
+                 updates=None, auxout=None, methods=True):
         """
         :param stepsize: the step to take in (negative) gradient direction
         :type stepsize: None, scalar value, or scalar TensorVariable
@@ -15,8 +21,9 @@
         :param updates: extra symbolic updates to make when evaluating either
         step or step_cost (these override the gradients if necessary)
         :type updates: dict Variable -> Variable
-        :type auxout: auxiliary outputs, list containing output symbols to
+        :param auxout: auxiliary outputs, list containing output symbols to
           compute at the same time as cost (for efficiency)
+        :param methods: Should this module define the step and step_cost methods?
         """
         super(StochasticGradientDescent, self).__init__()
         self.stepsize_init = None
@@ -38,13 +45,19 @@
         if updates is not None:
             self._updates.update(updates)
 
-        auxout = auxout if auxout else []
-        self.step = theano.Method(
-                args, auxout,
-                updates=self._updates)
-        self.step_cost = theano.Method(
-                args, [cost]+auxout,
-                updates=self._updates)
+        if methods:
+            if auxout is None:
+                self.step = theano.Method(args, [], updates=self._updates)
+                self.step_cost = theano.Method(args, cost, updates=self._updates)
+            else:
+                # step_cost always returns a list when auxout is given
+                self.step = theano.Method(
+                        args, [] + auxout,
+                        updates=self._updates)
+                self.step_cost = theano.Method(
+                        args, [cost]+auxout,
+                        updates=self._updates)
+
     updates = property(lambda self: self._updates.copy())
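
For illustration, here is a hedged usage sketch of what the new kwarg changes. It is modelled on pylearn's SGD test style and assumes a 2009-era Theano that still ships the Module system (theano.Module, theano.Method, Module.make); the names x, y, M, and m are illustrative, not part of the patch. With the default methods=True the compiled module exposes step and step_cost (step_cost returning the bare cost when auxout is None, and [cost] + auxout otherwise); with methods=False neither Method is compiled, but the symbolic updates remain available through the updates property.

    # A usage sketch, not part of the changeset. Assumes the old Theano
    # Module API; Module.make() details may differ across versions.
    import theano
    from pylearn.algorithms import sgd

    x = theano.tensor.dscalar('x')
    y = theano.tensor.dscalar('y')

    # Fit y to minimize (1 - x*y)^2 for a stream of x values.
    M = sgd.StochasticGradientDescent([x], (1.0 - x * y) ** 2, [y],
                                      stepsize=0.01)
    M.y = y          # register y as a module member so it carries state
    m = M.make()     # compile the module
    m.y = 5.0        # initial parameter value

    for i in range(100):
        c = m.step_cost(3.0)   # with auxout=None this is the bare cost value

    # With methods=False, no step/step_cost Methods are compiled, but the
    # symbolic update dictionary is still exposed:
    M2 = sgd.StochasticGradientDescent([x], (1.0 - x * y) ** 2, [y],
                                       stepsize=0.01, methods=False)
    update_dict = M2.updates   # a copy of the {param: new_value} expressions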