Mercurial > pylearn
changeset 744:4d22396678e6
typos in parameter names
author | bergstra@ip05.m |
---|---|
date | Mon, 01 Jun 2009 17:44:56 -0400 |
parents | 504ce30bbd2a |
children | fc85ce33b518 |
files | pylearn/algorithms/sgd.py pylearn/algorithms/stopper.py |
diffstat | 2 files changed, 3 insertions(+), 3 deletions(-) [+] |
line wrap: on
line diff
--- a/pylearn/algorithms/sgd.py	Mon Jun 01 00:22:35 2009 -0400
+++ b/pylearn/algorithms/sgd.py	Mon Jun 01 17:44:56 2009 -0400
@@ -56,7 +56,7 @@
     :returns: standard minimizer constructor f(args, cost, params, gradient=None)
     """
-    def f(args, cost, params, gradient=None, updates=None, auxout=None):
-        return StochasticGradientDescent(args, cost, params, gradient, stepsize,
+    def f(args, cost, params, gradients=None, updates=None, auxout=None):
+        return StochasticGradientDescent(args, cost, params, gradients=gradients, stepsize=stepsize,
                 updates=updates, auxout=auxout)
     return f
--- a/pylearn/algorithms/stopper.py	Mon Jun 01 00:22:35 2009 -0400
+++ b/pylearn/algorithms/stopper.py	Mon Jun 01 17:44:56 2009 -0400
@@ -100,7 +100,7 @@
         starting = self.iter < self.initial_wait
         waiting = self.iter < (self.patience * self.best_iter)
-        times_up = (time.time() - self.start_time) > self.hard_limit_second if self.hard_limit_second != None else False
+        times_up = (time.time() - self.start_time) > self.hard_limit_seconds if self.hard_limit_seconds != None else False
         if (starting or waiting) and not times_up:
             # continue to iterate
             self.iter += 1