pylearn: diff learner.py @ 131:57e6492644ec
Automated merge with ssh://p-omega1@lgcm.iro.umontreal.ca/tlearn
author    Yoshua Bengio <bengioy@iro.umontreal.ca>
date      Wed, 07 May 2008 21:40:15 -0400
parents   3d8e40e7ed18 4c2280edcaf5
children  f6505ec32dc3
--- a/learner.py	Wed May 07 21:40:03 2008 -0400
+++ b/learner.py	Wed May 07 21:40:15 2008 -0400
@@ -1,6 +1,7 @@
 from dataset import AttributesHolder,AbstractFunction
 import compile
+from theano import tensor as t
 
 class Learner(AttributesHolder):
     """Base class for learning algorithms, provides an interface
@@ -136,9 +137,9 @@
            of (optionally copies) values of attributes.
         """
         if return_copy:
-            return [copy.deepcopy(self.__getattr__(name).data) for name in names]
+            return [copy.deepcopy(self.__getattribute__(name).data) for name in names]
         else:
-            return [self.__getattr__(name).data for name in names]
+            return [self.__getattribute__(name).data for name in names]
 
     def updateInputAttributes(self):
         """
@@ -252,7 +253,7 @@
         Private helper function that maps a list of attribute names to a list
         of corresponding Op Results (with the same name but with a '_' prefix).
         """
-        return [self.__getattr__('_'+name).data for name in names]
+        return [self.__getattribute__('_'+name).data for name in names]
 
 
 class MinibatchUpdatesTLearner(TLearner):
@@ -355,7 +356,7 @@
             self.updateEnd()
         return self.use
 
-class OnlineGradientBasedTLearner(MinibatchUpdatesTLearner):
+class OnlineGradientTLearner(MinibatchUpdatesTLearner):
     """
     Specialization of MinibatchUpdatesTLearner in which the minibatch updates
     are obtained by performing an online (minibatch-based) gradient step.
@@ -376,14 +377,14 @@
         self.truly_online=truly_online
 
         # create the formulas for the gradient update
-        old_params = [self.__getattr__("_"+name) for name in self.parameterAttributes()]
+        old_params = [self.__getattribute__("_"+name) for name in self.parameterAttributes()]
         new_params_names = ["_new_"+name for name in self.parameterAttributes()]
-        loss = self.__getattr__(self.lossAttribute())
+        loss = self.__getattribute__("_"+self.lossAttribute())
         self.setAttributes(new_params_names,
-                           [t.add_inplace(self.param,
-                                          self._learning_rate*t.grad(loss,param))
+                           [t.add_inplace(param,self._learning_rate*t.grad(loss,param))
                             for param in old_params])
-        
+        MinibatchUpdatesTLearner.__init__(self)
+
     def isLastEpoch(self):
         return self.truly_online
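Most of this changeset replaces explicit `self.__getattr__(name)` calls with `self.__getattribute__(name)`. The distinction matters: Python invokes `__getattr__` only as a fallback when normal attribute lookup fails, so calling it directly skips the instance and class dictionaries entirely, whereas `__getattribute__` is the unconditional lookup hook, equivalent to `getattr(self, name)`. A minimal standalone sketch of the difference (the `Holder` class here is illustrative, not pylearn's `AttributesHolder`):

```python
class Holder(object):
    """Toy stand-in for an attribute-holding class (not pylearn's AttributesHolder)."""

    def __init__(self):
        self.x = 41  # stored normally, in the instance __dict__

    def __getattr__(self, name):
        # Fallback hook: Python calls this only when normal lookup fails.
        raise AttributeError("no fallback for %r" % name)

h = Holder()
print(h.__getattribute__('x'))  # 41 -- unconditional lookup, same as getattr(h, 'x')
try:
    h.__getattr__('x')  # explicit call runs only the fallback; __dict__ is never consulted
except AttributeError:
    print("explicit __getattr__ never sees normally-stored attributes")
```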
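The last hunk also repairs the update expression itself: the undefined `self.param` becomes the loop variable `param`, and the loss is now fetched through its `'_'`-prefixed symbolic attribute. Each new parameter is built as `param + learning_rate * grad(loss, param)`, applied in place via `t.add_inplace`. Below is a plain-NumPy sketch of the numeric step that symbolic graph computes; the function and variable names are hypothetical, chosen for illustration, not part of pylearn:

```python
import numpy as np

def online_gradient_step(params, grads, learning_rate):
    """In-place minibatch update mirroring t.add_inplace(param, lr * t.grad(loss, param)).

    The sign convention follows the diff: the gradient is *added*,
    so a descent step requires a negative learning_rate.
    """
    for param, grad in zip(params, grads):
        param += learning_rate * grad  # updates the array in place, like add_inplace
    return params

# usage: two toy parameter arrays and their (pretend) loss gradients
w, b = np.ones(3), np.zeros(3)
online_gradient_step([w, b], [np.full(3, 0.5), np.full(3, 0.1)], learning_rate=-0.1)
print(w, b)  # [0.95 0.95 0.95] [-0.01 -0.01 -0.01]
```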