diff learner.py @ 20:266c68cb6136

Minor edits, plus adding untested ApplyFunctionDataset for GradientLearner in the works.
author bengioy@bengiomac.local
date Mon, 07 Apr 2008 09:48:39 -0400
parents 5ede27026e05
children 90e4c0784d6e
line wrap: on
line diff
--- a/learner.py	Thu Mar 27 01:59:44 2008 -0400
+++ b/learner.py	Mon Apr 07 09:48:39 2008 -0400
@@ -36,20 +36,21 @@
         return self.use # default behavior is 'non-adaptive', i.e. update does not do anything
     
     
-    def __call__(self,training_set):
+    def __call__(self,training_set,train_stats_collector=None):
         """
         Train a learner from scratch using the provided training set,
         and return the learned function.
         """
         self.forget()
-        return self.update(learning_task)
+        return self.update(learning_task,train_stats_collector)
 
-
-    def use(self,input_dataset,output_fields=None):
+    def use(self,input_dataset,output_fields=None,copy_inputs=True):
         """Once a Learner has been trained by one or more call to 'update', it can
         be used with one or more calls to 'use'. The argument is a DataSet (possibly
         containing a single example) and the result is a DataSet of the same length.
         If output_fields is specified, it may be used to indicate which fields should
         be constructed in the output DataSet (for example ['output','classification_error']).
+        Optionally, if copy_inputs, the input fields (of the input_dataset) can be made
+        visible in the output DataSet returned by this function.
         """
         raise NotImplementedError