changeset 1167:7a8dcf87d780

Rename learn_meeting.py to API_learner.txt
author Pascal Lamblin <lamblinp@iro.umontreal.ca>
date Fri, 17 Sep 2010 13:57:46 -0400
parents ec1e93663656
children 77b6ed85d3f7
files doc/v2_planning/API_learner.txt doc/v2_planning/learn_meeting.py doc/v2_planning/learner.txt
diffstat 3 files changed, 77 insertions(+), 77 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/v2_planning/API_learner.txt	Fri Sep 17 13:57:46 2010 -0400
@@ -0,0 +1,76 @@
+
+
+def bagging(learner_factory):
+    for i in range(N):
+        learner_i = learner_factory.new()
+        # todo: get dataset_i ??
+        learner_i.use_dataset(dataset_i)
+        learner_i.train()
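+
+# A possible concrete version of the driver above. The bootstrap resampling,
+# the dataset-as-list-of-examples assumption, and the learner_factory.new()
+# interface are illustrative guesses, not decisions from the meeting.
+import random
+
+def bagging_sketch(learner_factory, dataset, N=10):
+    learners = []
+    for i in range(N):
+        learner_i = learner_factory.new()
+        # bootstrap sample: draw len(dataset) examples with replacement
+        dataset_i = [random.choice(dataset) for _ in range(len(dataset))]
+        learner_i.use_dataset(dataset_i)
+        learner_i.train()
+        learners.append(learner_i)
+    return learners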
+'''
+ List of task types:
+  Attributes
+
+   sequential
+   spatial
+   structured
+   semi-supervised
+   missing-values
+
+
+  Supervised (x,y)
+
+   classification
+   regression
+   probabilistic classification
+   ranking
+   conditional density estimation
+   collaborative filtering
+   ordinal regression ?= ranking 
+
+  Unsupervised (x)
+
+   de-noising
+   feature learning (transformation): PCA, DAA
+   density estimation
+   inference
+
+  Other
+
+   generation (sampling)
+   structure learning ???
+
+
+Notes on metrics & statistics:
+   - some are applied to a single example, others to a batch
+   - most statistics are computed over the whole dataset
+'''
+class Learner(object):
+    
+    #def use_dataset(dataset)
+
+    # return a dictionary of hyperparameter names (keys)
+    # and values (values)
+    def get_hyper_parameters()
+    def set_hyper_parameters(dictionary)
+
+
+    
+    
+    # Ver B
+    def eval(dataset)
+    def predict(dataset)
+
+    # Trainable
+    def train(dataset)   # train until completion
+
+    # Incremental
+    def use_dataset(dataset)
+    def adapt(n_steps=1)
+    def has_converged()
+
+    # 
+
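+# A self-contained sketch of the incremental loop implied by use_dataset(),
+# adapt() and has_converged(). ToyLearner and its mean-estimation "training"
+# are placeholders chosen only so the loop is executable.
+class ToyLearner(object):
+    def __init__(self):
+        self.estimate = 0.0
+        self.steps = 0
+    def use_dataset(self, dataset):
+        self.dataset = dataset
+    def adapt(self, n_steps=1):
+        # each step nudges the estimate toward the dataset mean
+        target = sum(self.dataset) / float(len(self.dataset))
+        for _ in range(n_steps):
+            self.estimate += 0.1 * (target - self.estimate)
+            self.steps += 1
+    def has_converged(self):
+        return self.steps >= 100
+
+toy = ToyLearner()
+toy.use_dataset([1.0, 2.0, 3.0])
+while not toy.has_converged():
+    toy.adapt(n_steps=10)
+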
+class HyperLearner(Learner):
+
+    ### def get_hyper_parameter_distribution(name)
+    def set_hyper_parameters_distribution(dictionary)
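+
+# A minimal random-search reading of the HyperLearner idea. The encoding of
+# distributions as (low, high) ranges and the scoring via eval() are guesses
+# at semantics the notes above leave open; in practice the score would come
+# from held-out data rather than the training set.
+import random
+
+class RandomSearchHyperLearner(object):
+    def __init__(self, learner_factory, n_trials=20):
+        self.learner_factory = learner_factory
+        self.n_trials = n_trials
+    def set_hyper_parameters_distribution(self, dictionary):
+        # dictionary maps hyper-parameter name -> (low, high) range
+        self.distributions = dictionary
+    def train(self, dataset):
+        best_score, self.best = None, None
+        for _ in range(self.n_trials):
+            params = dict((name, random.uniform(low, high))
+                          for name, (low, high) in self.distributions.items())
+            learner = self.learner_factory.new()
+            learner.set_hyper_parameters(params)
+            learner.train(dataset)
+            score = learner.eval(dataset)
+            if best_score is None or score > best_score:
+                best_score, self.best = score, params
+        return self.best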
--- a/doc/v2_planning/learn_meeting.py	Fri Sep 17 12:56:43 2010 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,76 +0,0 @@
-
-
-def bagging(learner_factory):
-    for i in range(N):
-        learner_i = learner_factory.new()
-        # todo: get dataset_i ??
-        learner_i.use_dataset(dataset_i)
-        learner_i.train()
-'''
- List of task types:
-  Attributes
-
-   sequential
-   spatial
-   structured
-   semi-supervised
-   missing-values
-
-
-  Supervised (x,y)
-
-   classification
-   regression
-   probabilistic classification
-   ranking
-   conditional density estimation
-   collaborative filtering
-   ordinal regression ?= ranking 
-
-  Unsupervised (x)
-
-   de-noising
-   feature learning (transformation): PCA, DAA
-   density estimation
-   inference
-
-  Other
-
-   generation (sampling)
-   structure learning ???
-
-
-Notes on metrics & statistics:
-   - some are applied to a single example, others to a batch
-   - most statistics are computed over the whole dataset
-'''
-class Learner(object):
-    
-    #def use_dataset(dataset)
-
-    # return a dictionary of hyperparameter names (keys)
-    # and values (values)
-    def get_hyper_parameters()
-    def set_hyper_parameters(dictionary)
-
-
-    
-    
-    # Ver B
-    def eval(dataset)
-    def predict(dataset)
-
-    # Trainable
-    def train(dataset)   # train until completion
-
-    # Incremental
-    def use_dataset(dataset)
-    def adapt(n_steps=1)
-    def has_converged()
-
-    # 
-
-class HyperLearner(Learner):
-
-    ### def get_hyper_parameter_distribution(name)
-    def set_hyper_parameters_distribution(dictionary)
--- a/doc/v2_planning/learner.txt	Fri Sep 17 12:56:43 2010 -0400
+++ b/doc/v2_planning/learner.txt	Fri Sep 17 13:57:46 2010 -0400
@@ -1,6 +1,6 @@
 
Committee: AB, PL, GM, IG, RP, NB, PV
-Leader: ?
+Leader: PL
 
 Discussion of Function Specification for Learner Types
 ======================================================