# HG changeset patch
# User James Bergstra
# Date 1305730310 14400
# Node ID ddda8d93c162217426daae4e4dd7281c15e2c48f
# Parent  281efa9a4463c1d12ff4a1cc1b5832711beccc4f
dtype tweaks in sgd

diff -r 281efa9a4463 -r ddda8d93c162 pylearn/gd/sgd.py
--- a/pylearn/gd/sgd.py	Wed May 18 10:51:11 2011 -0400
+++ b/pylearn/gd/sgd.py	Wed May 18 10:51:50 2011 -0400
@@ -1,6 +1,6 @@
 """A stochastic gradient descent minimizer.
 """
-
+import numpy
 import theano
 
 def sgd_updates(params, grads, stepsizes):
@@ -35,11 +35,11 @@
         momentum = [momentum for p in params]
     if len(params) != len(grads):
         raise ValueError('params and grads have different lens')
-    headings = [theano.shared(p.get_value(borrow=False)*0) for p in params]
+    headings = [theano.shared(numpy.zeros_like(p.get_value(borrow=True))) for p in params]
     updates = []
     for s, p, gp, m, h in zip(stepsizes, params, grads, momentum, headings):
         updates.append((p, p + s * h))
-        updates.append((h, m*h - (1-m)*gp))
+        updates.append((h, m*h - (1.0-m)*gp))
     return updates
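
Note (not part of the patch): the sketch below illustrates the momentum update rule constructed in the second hunk, assuming Theano is installed. The variable names (x, w, loss) and the scalar stepsize/momentum values are illustrative only; this hand-rolls the same update expressions rather than calling into pylearn.

    import numpy
    import theano
    import theano.tensor as T

    x = T.vector('x')
    # Shared parameter; float32 here to show that the heading buffer below
    # inherits the parameter's dtype via numpy.zeros_like.
    w = theano.shared(numpy.zeros(3, dtype='float32'), name='w')
    loss = (T.dot(x, w) - 1.0) ** 2
    grad_w = T.grad(loss, w)

    stepsize, m = 0.01, 0.9
    # Momentum "heading" buffer: zeros with the same shape and dtype as w,
    # mirroring numpy.zeros_like(p.get_value(borrow=True)) in the patch.
    h = theano.shared(numpy.zeros_like(w.get_value(borrow=True)))

    updates = [(w, w + stepsize * h),             # move parameter along heading
               (h, m * h - (1.0 - m) * grad_w)]   # blend new gradient into heading
    train = theano.function([x], loss, updates=updates)

Preallocating the heading with numpy.zeros_like on a borrowed value gives a buffer with the parameter's own shape and dtype without copying the parameter's data just to multiply it by zero, which appears to be the point of the "dtype tweaks" in this changeset.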