# HG changeset patch
# User James Bergstra
# Date 1279304448 14400
# Node ID fdd648c7c58309353a29be9beeb21367b4b875cf
# Parent  57ac1af9c17c5fca4fbefade6fc3ac4ad845a588
shared/layers/sgd removed cruft

diff -r 57ac1af9c17c -r fdd648c7c583 pylearn/shared/layers/sgd.py
--- a/pylearn/shared/layers/sgd.py	Tue Nov 17 15:20:40 2009 -0500
+++ b/pylearn/shared/layers/sgd.py	Fri Jul 16 14:20:48 2010 -0400
@@ -20,27 +20,17 @@
         """
         :param stepsize: the step to take in (negative) gradient direction
         :type stepsize: None, scalar value, or scalar TensorVariable
-
-        :param updates: extra symbolic updates to make when evating either step or step_cost
-            (these override the gradients if necessary)
-        :type updates: dict Variable -> Variable
-        :param auxout: auxiliary outputs, list containing output symbols to
-            compute at the same time as cost (for efficiency)
-        :param methods: Should this module define the step and step_cost methods?
         """
         if len(inputs) != len(gradients):
             raise ValueError('inputs list and gradients list must have same len')
         self.inputs = inputs
-        self.params = params
-        self.updates = updates = []
-        self.outputs = outputs = []
-
-        for i, g in zip(inputs, gradients):
-            o = i - stepsize * g
-            outputs.append(o)
-            if hasattr(i, 'value'): # this is true for shared variables, false for most things.
-                updates.append((i, o))
+        self.gradients = gradients
+        self.params = params # contains either nothing or the learning rate
+        self.outputs = [i - stepsize*g for (i,g) in zip(inputs, gradients)]
+        self.updates = [(input, self.outputs[i])
+                for (i,input) in enumerate(self.inputs)
+                if hasattr(input, 'value')] # true for shared variables

     @classmethod
     def new(cls, inputs, cost, stepsize, dtype=None):
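
The sketch below is not part of the patch; it is a minimal, hedged illustration of what the rewritten constructor computes. The class and variable names (a parameter vector w, an input x, and a squared-error cost) are invented for the example; only the update rule itself, new_param = param - stepsize * grad, and the (shared_variable, new_value) pairs kept in self.updates come from the diff above. Those pairs are in exactly the form theano.function accepts for its updates argument.

    # Assumptions: Theano of the same era (shared variables expose a .value
    # attribute) and Python 2 syntax, to match the patched pylearn code.
    import numpy
    import theano
    import theano.tensor as T

    # a shared parameter vector and a symbolic cost that depends on it
    w = theano.shared(numpy.zeros(5, dtype='float64'), name='w')
    x = T.dvector('x')
    cost = T.sum((T.dot(w, x) - 1.0) ** 2)

    stepsize = 0.01
    grads = T.grad(cost, [w])

    # equivalent of the list comprehensions introduced by the patch:
    # one SGD step per input, but updates only for shared variables
    outputs = [p - stepsize * g for (p, g) in zip([w], grads)]
    updates = [(p, o) for (p, o) in zip([w], outputs) if hasattr(p, 'value')]

    step = theano.function([x], cost, updates=updates)
    print step(numpy.ones(5))  # each call moves w one SGD step downhill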