Mercurial > pylearn
diff doc/v2_planning/learn_meeting.py @ 1087:8c448829db30
learning committee first draft of an api
author | Razvan Pascanu <r.pascanu@gmail.com> |
---|---|
date | Sat, 11 Sep 2010 20:33:34 -0400 |
parents | |
children |
line wrap: on
line diff
"""Learning committee: first draft of a learner API (planning notes).

This module sketches the interface the learning committee proposed.  The
method bodies are intentionally stubs (``raise NotImplementedError``) —
concrete learners are expected to subclass and override them.
"""


def bagging(learner_factory, datasets=()):
    """Train one learner per dataset and return the trained learners.

    The original draft iterated ``range(N)`` and used ``dataset_i`` as
    undefined free names (with a ``# todo: get dataset_i ??``); here the
    caller supplies the already-resampled bootstrap datasets explicitly.

    :param learner_factory: object with a ``new()`` method producing a
        fresh, untrained learner.
    :param datasets: iterable of datasets, one per bagged learner.
    :return: list of trained learners, one per dataset.
    """
    learners = []
    for dataset_i in datasets:
        learner_i = learner_factory.new()
        learner_i.use_dataset(dataset_i)
        learner_i.train()
        learners.append(learner_i)
    return learners


# Planning notes kept verbatim from the meeting draft.
'''
    List of tasks types:
    Attributes

    sequential
    spatial
    structured
    semi-supervised
    missing-values


    Supervised (x, y)

    classification
    regression
    probabilistic classification
    ranking
    conditional density estimation
    collaborative filtering
    ordinal regression ?= ranking

    Unsupervised (x)

    de-noising
    feature learning (transformation) PCA, DAA
    density estimation
    inference

    Other

    generation (sampling)
    structure learning ???


Notes on metrics & statistics:
    - some are applied to an example, others on a batch
    - most statistics are on the dataset
'''


class Learner(object):
    """Draft interface for a learner (supervised or unsupervised).

    Groups the operations the committee discussed: hyper-parameter
    access, evaluation/prediction ("Ver B"), one-shot training, and
    incremental training with a convergence test.
    """

    def get_hyper_parameters(self):
        """Return a dict of hyper-parameter names (keys) to values."""
        raise NotImplementedError

    def set_hyper_parameters(self, dictionary):
        """Set hyper-parameters from a name -> value dict."""
        raise NotImplementedError

    # --- Ver B: evaluation / prediction -----------------------------
    def eval(self, dataset):
        """Evaluate the learner on ``dataset``."""
        raise NotImplementedError

    def predict(self, dataset):
        """Produce predictions for ``dataset``."""
        raise NotImplementedError

    # --- Trainable --------------------------------------------------
    def train(self, dataset):
        """Train on ``dataset`` until completion."""
        raise NotImplementedError

    # --- Incremental ------------------------------------------------
    def use_dataset(self, dataset):
        """Attach ``dataset`` as the source for incremental adaptation."""
        raise NotImplementedError

    def adapt(self, n_steps=1):
        """Perform ``n_steps`` incremental training steps."""
        raise NotImplementedError

    def has_converged(self):
        """Return True once incremental training has converged."""
        raise NotImplementedError


class HyperLearner(Learner):
    """Learner whose hyper-parameters are themselves searched/learned."""

    # Draft also floated: get_hyper_parameter_distribution(name)
    def set_hyper_parameters_distribution(self, dictionary):
        """Set the search distribution for each hyper-parameter (name -> dist)."""
        raise NotImplementedError