# HG changeset patch
# User bergstra@ip05.m
# Date 1243892696 14400
# Node ID 4d22396678e6003c295d2c12390e3398727f73b0
# Parent 504ce30bbd2a34c248f607dbf037cdbe20d95790
typos in parameter names

diff -r 504ce30bbd2a -r 4d22396678e6 pylearn/algorithms/sgd.py
--- a/pylearn/algorithms/sgd.py	Mon Jun 01 00:22:35 2009 -0400
+++ b/pylearn/algorithms/sgd.py	Mon Jun 01 17:44:56 2009 -0400
@@ -56,7 +56,7 @@
     :returns: standard minimizer constructor f(args, cost, params, gradient=None)
     """
-    def f(args, cost, params, gradient=None, updates=None, auxout=None):
-        return StochasticGradientDescent(args, cost, params, gradient, stepsize,
+    def f(args, cost, params, gradients=None, updates=None, auxout=None):
+        return StochasticGradientDescent(args, cost, params, gradients=gradients, stepsize=stepsize,
                 updates=updates, auxout=auxout)
     return f
diff -r 504ce30bbd2a -r 4d22396678e6 pylearn/algorithms/stopper.py
--- a/pylearn/algorithms/stopper.py	Mon Jun 01 00:22:35 2009 -0400
+++ b/pylearn/algorithms/stopper.py	Mon Jun 01 17:44:56 2009 -0400
@@ -100,7 +100,7 @@
         starting = self.iter < self.initial_wait
         waiting = self.iter < (self.patience * self.best_iter)
-        times_up = (time.time() - self.start_time) > self.hard_limit_second if self.hard_limit_second != None else False
+        times_up = (time.time() - self.start_time) > self.hard_limit_seconds if self.hard_limit_seconds != None else False
        if (starting or waiting) and not times_up:
            # continue to iterate
            self.iter += 1
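
Note on the sgd.py hunk: besides renaming the factory's `gradient` parameter to `gradients`, the patch switches to forwarding `gradients` and `stepsize` into `StochasticGradientDescent` by keyword. The sketch below is a minimal, self-contained illustration of that forwarding pattern only; the function and class names are stand-ins, not pylearn's actual API.

```python
def minimizer(args, cost, params, gradients=None, stepsize=None):
    """Stand-in for a constructor like StochasticGradientDescent."""
    return {"gradients": gradients, "stepsize": stepsize}

def sgd_factory(stepsize):
    """Stand-in for the patched factory: binds stepsize in a closure."""
    def f(args, cost, params, gradients=None):
        # Forward by keyword, as the patched code does, so that a renamed or
        # reordered constructor parameter cannot silently receive the wrong
        # positional value.
        return minimizer(args, cost, params, gradients=gradients, stepsize=stepsize)
    return f

if __name__ == "__main__":
    f = sgd_factory(0.01)
    print(f([], cost=None, params=[]))  # {'gradients': None, 'stepsize': 0.01}
```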