diff pylearn/gd/sgd.py @ 1474:a57f4839a9d8
description: merge
author:      James Bergstra <bergstrj@iro.umontreal.ca>
date:        Wed, 18 May 2011 10:52:42 -0400
parents:     ddda8d93c162 cac29ca79a74
children:    0e6ca7eecc72
--- a/pylearn/gd/sgd.py	Wed May 18 10:52:22 2011 -0400
+++ b/pylearn/gd/sgd.py	Wed May 18 10:52:42 2011 -0400
@@ -16,7 +16,7 @@
     """
     try:
         iter(stepsizes)
-    except:
+    except Exception:
         stepsizes = [stepsizes for p in params]
     if len(params) != len(grads):
         raise ValueError('params and grads have different lens')
@@ -27,11 +27,11 @@
     # if stepsizes is just a scalar, expand it to match params
     try:
         iter(stepsizes)
-    except:
+    except Exception:
         stepsizes = [stepsizes for p in params]
     try:
         iter(momentum)
-    except:
+    except Exception:
         momentum = [momentum for p in params]
     if len(params) != len(grads):
         raise ValueError('params and grads have different lens')
@@ -79,7 +79,8 @@
         raise TypeError('stepsize must be a scalar', stepsize)
 
         self.params = params
-        self.gparams = theano.tensor.grad(cost, self.params) if gradients is None else gradients
+        self.gparams = [theano.tensor.grad(cost, self.params)] if gradients is None else gradients
+        assert len(self.params) == len(self.gparams)
         self._updates = (dict((p, p - self.stepsize * g)
             for p, g in zip(self.params, self.gparams)))
         if updates is not None:
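For context, a minimal sketch of how updates of the form `p - stepsize * g`, as constructed in this file, are typically wired into a Theano training function. The toy cost and the variable names below are assumptions made for illustration, not part of this changeset; only `theano.shared`, `theano.tensor.grad`, and `theano.function` are real Theano calls.

```python
# Illustrative sketch (not from this changeset): build SGD updates
# p <- p - stepsize * grad(cost, p) and compile a training step.
import numpy as np
import theano
import theano.tensor as T

x = T.vector('x')                                    # toy input vector
w = theano.shared(np.zeros(3, dtype='float64'), name='w')
b = theano.shared(np.float64(0.0), name='b')
cost = T.sum((T.dot(x, w) + b) ** 2)                 # assumed toy cost

params = [w, b]
grads = T.grad(cost, params)                         # one gradient per parameter
stepsize = 0.01                                      # a scalar stepsize applies to every param

# Same update rule as in sgd.py: pair each shared variable with its new value.
updates = [(p, p - stepsize * g) for p, g in zip(params, grads)]
train = theano.function([x], cost, updates=updates)
```

As for the exception handling, catching `except Exception:` rather than using a bare `except:` keeps the scalar-to-list expansion from silently swallowing `KeyboardInterrupt` and `SystemExit`.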