# HG changeset patch
# User James Bergstra
# Date 1225142943 14400
# Node ID 1babf35fcef53874b233f9f065c1cea1dcfd193c
# Parent  fbfd3932fd0052df3f7e8670c068e3988acbb8a1
# Parent  8fcd0f3d9a177c8c50bf0e726050cf8bedf82ebd
merged

diff -r 8fcd0f3d9a17 -r 1babf35fcef5 algorithms/sgd.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/algorithms/sgd.py	Mon Oct 27 17:29:03 2008 -0400
@@ -0,0 +1,14 @@
+
+from theano.compile import module
+from theano import tensor as T
+
+class StochasticGradientDescent(module.FancyModule):
+    def __init__(self, params, gparams, lr=None):
+        super(StochasticGradientDescent, self).__init__()
+
+        self.lr = lr if lr is not None else module.Member(T.dscalar())
+        self.params = params
+        self.gparams = gparams
+
+        self.updates = dict((p, p - self.lr * g) for p, g in zip(self.params, self.gparams))
+
diff -r 8fcd0f3d9a17 -r 1babf35fcef5 stopper.py
--- a/stopper.py	Mon Oct 27 17:26:00 2008 -0400
+++ b/stopper.py	Mon Oct 27 17:29:03 2008 -0400
@@ -54,7 +54,7 @@
 
         if stp.set_score:
             stp.score = check()
             if (stp.score < stp.best_score) and save:
-                best = save()
+                best = (save(), stp.iter, stp.score)
 
     return best
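Note on the sgd.py hunk above: the updates dictionary maps each parameter p to the expression p - self.lr * g, i.e. one step of vanilla stochastic gradient descent applied symbolically by the Theano module. A minimal plain-NumPy sketch of the same numeric update follows; sgd_step and the example arrays are illustrative only and are not part of this changeset.

import numpy as np

def sgd_step(params, gparams, lr=0.01):
    """Apply one in-place SGD update, p <- p - lr * g, to each parameter."""
    for p, g in zip(params, gparams):
        p -= lr * g  # same rule the module records in self.updates

# Hypothetical usage with two small parameter/gradient pairs:
params = [np.zeros(3), np.ones((2, 2))]
gparams = [np.full(3, 0.5), np.full((2, 2), 0.1)]
sgd_step(params, gparams, lr=0.1)

The stopper.py hunk changes what find-min-style loops return on improvement: instead of just the saved model, the caller now gets a (saved_value, iteration, score) triple, so the best score and the iteration at which it occurred travel with the checkpoint.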