diff mlp_factory_approach.py @ 304:6ead65d30f1e

the early stopper can now be set when learning via __call__
author Thierry Bertin-Mahieux <bertinmt@iro.umontreal.ca>
date Tue, 10 Jun 2008 17:16:49 -0400
parents eded3cb54930
children 93280a0c151a
--- a/mlp_factory_approach.py	Tue Jun 10 11:07:20 2008 -0400
+++ b/mlp_factory_approach.py	Tue Jun 10 17:16:49 2008 -0400
@@ -5,7 +5,7 @@
 from theano import tensor as T
 
 import dataset, nnet_ops, stopper, filetensor
-from lookup_list import LookupList
+from pylearn.lookup_list import LookupList
 
 
 class AbstractFunction (Exception): pass
@@ -123,7 +123,7 @@
             return theano.gof.PerformLinker()
 
         def early_stopper(self):
-            stopper.NStages(10,1)
+            return stopper.NStages(300,1)
         
         def train_iter(self, trainset):
             raise AbstractFunction
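
The default early stopper here goes from 10 stages to 300. The NStages
constructor is not shown in this diff, so reading its two arguments as
(maximum number of stages, check interval) is an assumption. A graph
subclass can still shorten training by overriding the default, the same
way graphMLP's G does at the bottom of this file; a minimal sketch,
assuming only the early_stopper hook shown above:

    import stopper  # pylearn's stopper module, imported at the top of this file

    class QuickGraph(object):
        # Stands in for the Graph base class, whose exact name is not
        # visible in this diff; only the early_stopper hook matters here.
        def early_stopper(self):
            # (10, 1) mirrors the old default; the (max_stages, interval)
            # reading of the arguments is an assumption.
            return stopper.NStages(10, 1)
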
@@ -146,12 +146,13 @@
                 unpack_single=False,
                 optimizer=self.graph.optimizer,
                 linker=self.graph.linker() if hasattr(self.graph, 'linker')
-                else 'c&py')
+                else 'c|py')
 
     def __call__(self,
             trainset=None,
             validset=None,
-            iparams=None):
+            iparams=None,
+            stp=None):
         """Allocate and optionally train a model
 
         @param trainset: Data for minimizing the cost function
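
A note on the linker strings: in Theano, 'c|py' links each op against
its C implementation and falls back to Python where no C code exists,
while 'c&py' runs both implementations and cross-checks their outputs,
a debugging mode that does roughly double the work. A sketch using the
later theano.function/Mode API for illustration (the call in this file
passes linker= directly):

    import theano
    from theano import tensor as T

    x = T.dvector('x')
    # 'c|py': use C code where available, fall back to Python otherwise.
    # 'c&py': run C and Python for every op and compare their results.
    f = theano.function([x], x * 2, mode=theano.Mode(linker='c|py'))
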
@@ -166,6 +167,9 @@
         @param target: name of field to use as target
         @type target: string
 
+        @param stp: early stopper, if None use default in graphMLP.G
+        @type stp: None or early stopper
+
         @return: model
         @rtype: GraphLearner.Model instance
         
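
With the new keyword a caller can hand in its own stopper instead of
taking the graph's default. A hypothetical call, where learner, trainset
and validset are stand-ins for real objects not shown in this diff:

    import stopper

    # The (50, 1) arguments follow the assumed (max_stages, interval)
    # reading of NStages; learner, trainset and validset are stand-ins.
    stp = stopper.NStages(50, 1)
    model = learner(trainset, validset, stp=stp)
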
@@ -184,17 +188,23 @@
         
         if trainset is not None: 
             #do some training by calling Model.update_minibatch()
-            stp = self.graph.early_stopper()
-            for mb in self.graph.train_iter(trainset):
-                curmodel.update_minibatch(mb)
-                if stp.set_score:
-                    if validset:
-                        stp.score = curmodel(validset, ['validset_score'])
-                        if (stp.score < stp.best_score):
-                            best = copy.copy(curmodel)
-                    else:
-                        stp.score = 0.0
-                stp.next()
+            if stp is None:
+                stp = self.graph.early_stopper()
+            try:
+                countiter = 0
+                for mb in self.graph.train_iter(trainset):
+                    curmodel.update_minibatch(mb)
+                    if stp.set_score:
+                        if validset:
+                            stp.score = curmodel(validset, ['validset_score'])
+                            if (stp.score < stp.best_score):
+                                best = copy.copy(curmodel)
+                        else:
+                            stp.score = 0.0
+                    countiter += 1
+                    stp.next()
+            except StopIteration:
+                print 'Iterations stopped after', countiter, 'iterations'
             if validset:
                 curmodel = best
         return curmodel
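
The rewritten loop now terminates through the stopper: stp.next() is
expected to raise StopIteration once its budget is spent, and the
except clause reports how many minibatches were consumed. A minimal
object satisfying the protocol the loop actually touches (set_score,
score, best_score, next) could look like the sketch below; this is an
assumption about the interface of stopper.NStages, not a description
of its implementation:

    class MinimalStopper(object):
        """Hedged sketch of the stopper contract used by __call__."""
        def __init__(self, hard_limit):
            self.hard_limit = hard_limit    # stop after this many steps
            self.steps = 0
            self.set_score = True           # the loop checks this flag
            self.score = None               # the loop writes this field
            self.best_score = float('inf')

        def next(self):
            # Remember the best validation score seen so far, then either
            # advance one step or end training by raising StopIteration.
            if self.score is not None and self.score < self.best_score:
                self.best_score = self.score
            self.steps += 1
            if self.steps >= self.hard_limit:
                raise StopIteration
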
@@ -278,8 +288,9 @@
 
         def train_iter(self, trainset):
             return trainset.minibatches(['input', 'target'],
-                    minibatch_size=min(len(trainset), 32), n_batches=300)
+                    minibatch_size=min(len(trainset), 32), n_batches=2000)
         def early_stopper(self): 
+            """ overwrites GraphLearner.graph function """
             return stopper.NStages(300,1)
 
     return G()
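
In this last hunk train_iter can now yield up to 2000 minibatches, yet
G's early_stopper still caps training at 300 stages, so under the
NStages reading above it is normally the stopper, not the exhausted
iterator, that raises StopIteration. A self-contained illustration with
made-up stand-ins:

    class CountStopper(object):
        # Toy stand-in for stopper.NStages: raise after `limit` steps.
        def __init__(self, limit):
            self.count, self.limit = 0, limit
        def next(self):
            self.count += 1
            if self.count >= self.limit:
                raise StopIteration

    stp = CountStopper(300)
    countiter = 0
    try:
        for mb in range(2000):  # stands in for graph.train_iter(trainset)
            countiter += 1
            stp.next()
    except StopIteration:
        print('Iterations stopped after %d iterations' % countiter)
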