pylearn: diff learner.py @ 129:4c2280edcaf5
Fixed typos in learner.py
| author   | Yoshua Bengio <bengioy@iro.umontreal.ca> |
|----------|------------------------------------------|
| date     | Wed, 07 May 2008 21:22:56 -0400          |
| parents  | ee5507af2c60                             |
| children | 57e6492644ec                             |
```diff
--- a/learner.py	Wed May 07 20:51:24 2008 -0400
+++ b/learner.py	Wed May 07 21:22:56 2008 -0400
@@ -1,6 +1,7 @@
-from dataset import AttributesHolder
+from dataset import AttributesHolder,AbstractFunction
 import compile
+from theano import tensor as t
 
 
 class Learner(AttributesHolder):
     """Base class for learning algorithms, provides an interface
@@ -136,9 +137,9 @@
         of (optionally copies) values of attributes.
         """
         if return_copy:
-            return [copy.deepcopy(self.__getattr__(name).data) for name in names]
+            return [copy.deepcopy(self.__getattribute__(name).data) for name in names]
         else:
-            return [self.__getattr__(name).data for name in names]
+            return [self.__getattribute__(name).data for name in names]
 
     def updateInputAttributes(self):
         """
@@ -252,7 +253,7 @@
         Private helper function that maps a list of attribute names
         to a list of corresponding Op Results (with the same name but with a '_' prefix).
         """
-        return [self.__getattr__('_'+name).data for name in names]
+        return [self.__getattribute__('_'+name).data for name in names]
 
 
 class MinibatchUpdatesTLearner(TLearner):
@@ -355,7 +356,7 @@
         self.updateEnd()
         return self.use
 
-class OnlineGradientBasedTLearner(MinibatchUpdatesTLearner):
+class OnlineGradientTLearner(MinibatchUpdatesTLearner):
     """
     Specialization of MinibatchUpdatesTLearner in which the minibatch updates
     are obtained by performing an online (minibatch-based) gradient step.
@@ -376,14 +377,14 @@
         self.truly_online=truly_online
 
         # create the formulas for the gradient update
-        old_params = [self.__getattr__("_"+name) for name in self.parameterAttributes()]
+        old_params = [self.__getattribute__("_"+name) for name in self.parameterAttributes()]
         new_params_names = ["_new_"+name for name in self.parameterAttributes()]
-        loss = self.__getattr__(self.lossAttribute())
+        loss = self.__getattribute__("_"+self.lossAttribute())
         self.setAttributes(new_params_names,
-                           [t.add_inplace(self.param,
-                                          self._learning_rate*t.grad(loss,param))
+                           [t.add_inplace(param,self._learning_rate*t.grad(loss,param))
                             for param in old_params])
-
+        MinibatchUpdatesTLearner.__init__(self)
+
 
     def isLastEpoch(self):
         return self.truly_online
```
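Context on the recurring `__getattr__` → `__getattribute__` fix in this changeset: `__getattr__` is only a *fallback* hook that Python invokes when normal attribute lookup fails, and plain `object` subclasses do not define it, so calling `self.__getattr__(name)` explicitly raises `AttributeError` unless the class happens to implement that hook. `__getattribute__` is the unconditional lookup method every object has. A minimal standalone sketch (the `Holder` class and its `w` attribute are hypothetical, not part of pylearn):

```python
class Holder(object):
    """Toy stand-in for an attribute container like AttributesHolder."""
    def __init__(self):
        self.w = 3.14

h = Holder()

# __getattribute__ exists on every object and performs normal lookup,
# so this is equivalent to h.w:
print(h.__getattribute__('w'))      # 3.14

# __getattr__ is only a fallback hook; object provides no default one,
# so calling it explicitly fails unless the class defines it:
try:
    h.__getattr__('w')
except AttributeError as e:
    print('no __getattr__ defined:', e)
```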
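The rewritten block in `OnlineGradientTLearner.__init__` builds, for each parameter, a symbolic expression that adds `learning_rate * d(loss)/d(param)` to the parameter in place; the companion fix prefixes `lossAttribute()` with `'_'` so the lookup retrieves the symbolic Result rather than its plain-name counterpart. Note the `+` sign in the update: a descent step would negate the gradient or use a negative learning rate. A numeric sketch of what one such minibatch update computes, using NumPy in place of the symbolic graph (all names here are illustrative, not the pylearn API):

```python
import numpy as np

def online_gradient_step(params, grads, learning_rate):
    """Apply new_param = param + learning_rate * grad, in place,
    mirroring t.add_inplace(param, learning_rate * t.grad(loss, param))."""
    for param, grad in zip(params, grads):
        param += learning_rate * grad   # in-place, like add_inplace
    return params

# Toy example: one weight vector, a made-up minibatch gradient,
# and a negative learning rate so the step actually descends.
w = np.array([0.5, -1.0])
g = np.array([0.2, 0.4])            # pretend this is d(loss)/d(w)
online_gradient_step([w], [g], learning_rate=-0.1)
print(w)                            # [ 0.48 -1.04]
```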