changeset 131:57e6492644ec

Automated merge with ssh://p-omega1@lgcm.iro.umontreal.ca/tlearn
author Yoshua Bengio <bengioy@iro.umontreal.ca>
date Wed, 07 May 2008 21:40:15 -0400
parents 3d8e40e7ed18 4c2280edcaf5
children f6505ec32dc3
files learner.py mlp.py
diffstat 2 files changed, 12 insertions(+), 11 deletions(-)
--- a/learner.py	Wed May 07 21:40:03 2008 -0400
+++ b/learner.py	Wed May 07 21:40:15 2008 -0400
@@ -1,6 +1,7 @@
 
 from dataset import AttributesHolder,AbstractFunction
 import compile
+from theano import tensor as t
     
 class Learner(AttributesHolder):
     """Base class for learning algorithms, provides an interface
@@ -136,9 +137,9 @@
         of (optionally copied) values of attributes.
         """
         if return_copy:
-            return [copy.deepcopy(self.__getattr__(name).data) for name in names]
+            return [copy.deepcopy(self.__getattribute__(name).data) for name in names]
         else:
-            return [self.__getattr__(name).data for name in names]
+            return [self.__getattribute__(name).data for name in names]
 
     def updateInputAttributes(self):
         """
@@ -252,7 +253,7 @@
         Private helper function that maps a list of attribute names to a list
         of corresponding Op Results (with the same name but with a '_' prefix).
         """
-        return [self.__getattr__('_'+name).data for name in names]
+        return [self.__getattribute__('_'+name).data for name in names]
 
 
 class MinibatchUpdatesTLearner(TLearner):
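The '_' naming convention this helper encodes, as a hedged illustration (the `learner` instance and attribute names here are hypothetical):

    # for each stored attribute "W1" the learner also holds a symbolic
    # Op Result named "_W1"; the helper maps names across that convention
    names = ["W1", "b1"]
    values = [learner.__getattribute__("_" + name).data for name in names]
    # equivalent to [learner._W1.data, learner._b1.data]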
@@ -355,7 +356,7 @@
         self.updateEnd()
         return self.use
 
-class OnlineGradientBasedTLearner(MinibatchUpdatesTLearner):
+class OnlineGradientTLearner(MinibatchUpdatesTLearner):
     """
     Specialization of MinibatchUpdatesTLearner in which the minibatch updates
     are obtained by performing an online (minibatch-based) gradient step.
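The context lines above (`self.updateEnd(); return self.use`) are the tail of the learner's call protocol. A hypothetical usage sketch, assuming the interface implied here (none of these calls appear in this diff):

    learner = OnlineGradientTLearner()  # hypothetical construction
    use = learner(training_set)         # run the minibatch updates, get the "use" function
    outputs = use(test_set)             # apply the trained model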
@@ -376,14 +377,14 @@
         self.truly_online=truly_online
 
         # create the formulas for the gradient update
-        old_params = [self.__getattr__("_"+name) for name in self.parameterAttributes()]
+        old_params = [self.__getattribute__("_"+name) for name in self.parameterAttributes()]
         new_params_names = ["_new_"+name for name in self.parameterAttributes()]
-        loss = self.__getattr__(self.lossAttribute())
+        loss = self.__getattribute__("_"+self.lossAttribute())
         self.setAttributes(new_params_names,
-                           [t.add_inplace(self.param,
-                                          self._learning_rate*t.grad(loss,param))
+                           [t.add_inplace(param,self._learning_rate*t.grad(loss,param))
                             for param in old_params])
-
+        MinibatchUpdatesTLearner.__init__(self)
+        
     def isLastEpoch(self):
         return self.truly_online
 
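One caveat in the rewritten update: `t.add_inplace(param, self._learning_rate * t.grad(loss, param))` adds the gradient, which ascends the loss, so minimizing the criterion presumably relies on a negative `_learning_rate`. The conventional descent step, sketched in the same vocabulary:

    # p <- p - learning_rate * dL/dp for each parameter p
    new_params = [t.add_inplace(param, -learning_rate * t.grad(loss, param))
                  for param in old_params]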
--- a/mlp.py	Wed May 07 21:40:03 2008 -0400
+++ b/mlp.py	Wed May 07 21:40:15 2008 -0400
@@ -7,7 +7,7 @@
 # the use of theano
 
 
-class OneHiddenLayerNNetClassifier(MinibatchUpdatesTLearner):
+class OneHiddenLayerNNetClassifier(OnlineGradientTLearner):
     """
     Implement a straightforward classical feedforward
     one-hidden-layer neural net, with L2 regularization.
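The forward computation such a one-hidden-layer classifier performs, as a self-contained numpy sketch (illustrative only; the class builds the equivalent symbolic Theano graph):

    import numpy as np

    def forward(x, W1, b1, W2, b2):
        """tanh hidden layer followed by a softmax output layer."""
        h = np.tanh(x.dot(W1) + b1)           # hidden activations
        a = h.dot(W2) + b2                    # output pre-activations
        a = a - a.max(axis=1, keepdims=True)  # stabilize the softmax
        p = np.exp(a)
        return p / p.sum(axis=1, keepdims=True)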
@@ -83,7 +83,7 @@
         self._output_class = t.argmax(self._output,1)
         self._class_error = self._output_class != self._target
         self._minibatch_criterion = self._nll + self._regularization_term / t.shape(self._input)[0]
-        MinibatchUpdatesTLearner.__init__(self)
+        OnlineGradientTLearner.__init__(self)
             
     def attributeNames(self):
         return ["parameters","b1","W2","b2","W2", "L2_regularizer","regularization_term"]