# HG changeset patch
# User Olivier Breuleux
# Date 1225167833 14400
# Node ID 2c4738e5e4b2c906621a4038ffbb17065cce8111
# Parent b15dad843c8cf5a51af87b19483abd8b07f22e20
# Parent 267ec8baef9fa3c2ad471a907b11639df3df99bf
merge

diff -r b15dad843c8c -r 2c4738e5e4b2 algorithms/sgd.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/algorithms/sgd.py Tue Oct 28 00:23:53 2008 -0400
@@ -0,0 +1,14 @@
+
+from theano.compile import module
+from theano import tensor as T
+
+class StochasticGradientDescent(module.FancyModule):
+    def __init__(self, params, gparams, lr=None):
+        super(StochasticGradientDescent, self).__init__()
+
+        self.lr = lr if lr is not None else module.Member(T.dscalar())
+        self.params = params
+        self.gparams = gparams
+
+        self.updates = dict((p, p - self.lr * g) for p, g in zip(self.params, self.gparams))
+
diff -r b15dad843c8c -r 2c4738e5e4b2 algorithms/tests/test_aa.py
--- a/algorithms/tests/test_aa.py Tue Oct 28 00:23:13 2008 -0400
+++ b/algorithms/tests/test_aa.py Tue Oct 28 00:23:53 2008 -0400
@@ -1,5 +1,6 @@
+#from __future__ import absolute_imports
 
-import models
+from pylearn import algorithms as models
 import theano
 import numpy
 import time
diff -r b15dad843c8c -r 2c4738e5e4b2 stopper.py
--- a/stopper.py Tue Oct 28 00:23:13 2008 -0400
+++ b/stopper.py Tue Oct 28 00:23:53 2008 -0400
@@ -54,7 +54,7 @@
         if stp.set_score:
            stp.score = check()
            if (stp.score < stp.best_score) and save:
-               best = save()
+               best = (save(), stp.iter, stp.score)
    return best
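The patch above introduces a small SGD module. The snippet below is a rough usage sketch, not part of the changeset: the import path `pylearn.algorithms.sgd` is assumed (by analogy with the test's `from pylearn import algorithms as models`), the symbolic variables are made up for illustration, and the `theano.compile.module` API the class builds on has long since been removed from Theano.

```python
# Illustrative sketch only; assumes the module is importable as below.
from theano import tensor as T
from pylearn.algorithms.sgd import StochasticGradientDescent

w = T.dvector()                         # symbolic parameter vector
x = T.dvector()                         # symbolic input
cost = T.sum((T.dot(w, x) - 1.0) ** 2)  # toy quadratic cost
gw = T.grad(cost, w)                    # symbolic gradient of cost w.r.t. w

sgd = StochasticGradientDescent(params=[w], gparams=[gw])
# With lr left as None, sgd.lr is a fresh dscalar Member, and sgd.updates
# maps each parameter p to the symbolic expression p - lr * grad(p), i.e.
#   sgd.updates == {w: w - sgd.lr * gw}
```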