# HG changeset patch
# User Razvan Pascanu
# Date 1284251614 14400
# Node ID 8c448829db3040ee02a91932460270a1f23291e9
# Parent  65ac0f493830568d48182df51b5d01da5176bde3
# learning committee first draft of an api
#
# NOTE(review): this file was a collapsed hg-export patch; the payload below is
# the reconstructed draft of doc/v2_planning/learn_meeting.py, made into valid
# Python (colons, bodies, and ``self`` added; ``Object`` -> ``object``).

'''First draft of a Learner API from the learning-committee meeting.

List of task types:

    Attributes

        sequential
        spatial
        structured
        semi-supervised
        missing-values

    Supervised (x, y)

        classification
        regression
        probabilistic classification
        ranking
        conditional density estimation
        collaborative filtering
        ordinal regression ?= ranking

    Unsupervised (x)

        de-noising
        feature learning (transformation) PCA, DAA
        density estimation
        inference

    Other

        generation (sampling)
        structure learning ???

Notes on metrics & statistics:
    - some are applied to an example, others on a batch
    - most statistics are on the dataset
'''


def bagging(learner_factory, datasets):
    """Train one fresh learner per dataset (bootstrap-aggregation sketch).

    The original draft iterated ``range(N)`` with ``N`` and ``dataset_i``
    undefined (flagged by its own ``# todo: get dataset_i ??``); the datasets
    are now passed in explicitly, one per committee member.

    :param learner_factory: object whose ``new()`` method returns an
        untrained learner.
    :param datasets: iterable of datasets, one per learner to train.
    :return: list of the trained learners.
    """
    learners = []
    for dataset_i in datasets:
        learner_i = learner_factory.new()
        learner_i.use_dataset(dataset_i)
        learner_i.train()
        learners.append(learner_i)
    return learners


class Learner(object):
    """Abstract interface a learning algorithm should expose.

    All methods are stubs that raise ``NotImplementedError``; concrete
    learners override the subset that applies to them.
    """

    def get_hyper_parameters(self):
        """Return a dictionary of hyper-parameter names (keys) to values."""
        raise NotImplementedError

    def set_hyper_parameters(self, dictionary):
        """Set hyper-parameters from a name -> value dictionary."""
        raise NotImplementedError

    # Ver B
    def eval(self, dataset):
        """Evaluate the learner on *dataset*."""
        raise NotImplementedError

    def predict(self, dataset):
        """Return predictions for *dataset*."""
        raise NotImplementedError

    # Trainable
    def train(self, dataset=None):
        """Train until completion.

        *dataset* is optional because the draft is ambiguous: ``bagging``
        calls ``train()`` with no argument after ``use_dataset``, while this
        signature originally took a dataset — TODO resolve in the committee.
        """
        raise NotImplementedError

    # Incremental
    def use_dataset(self, dataset):
        """Attach *dataset* for subsequent (incremental) training."""
        raise NotImplementedError

    def adapt(self, n_steps=1):
        """Run *n_steps* incremental training steps."""
        raise NotImplementedError

    def has_converged(self):
        """Return True once incremental training has converged."""
        raise NotImplementedError


class HyperLearner(Learner):
    """Learner that additionally manages hyper-parameter distributions."""

    # ## def get_hyper_parameter_distribution(name)
    def set_hyper_parameters_distribution(self, dictionary):
        """Set per-hyper-parameter distributions from a name -> dist dict."""
        raise NotImplementedError