Mercurial > pylearn
comparison doc/v2_planning/API_learner.txt @ 1174:fe6c25eb1e37
merge
author | pascanur |
---|---|
date | Fri, 17 Sep 2010 16:13:58 -0400 |
parents | 77b6ed85d3f7 |
children | 805e7c369fd1 |
comparison
equal
deleted
inserted
replaced
1173:a0f178bc9052 | 1174:fe6c25eb1e37 |
---|---|
1 # A list of "task types" | |
2 | |
3 ''' | |
4 List of tasks types: | |
5 Attributes | |
6 | |
7 sequential | |
8 spatial | |
9 structured | |
10 semi-supervised | |
11 missing-values | |
12 | |
13 | |
14 Supervised (x,y) | |
15 | |
16 classification | |
17 regression | |
18 probabilistic classification | |
19 ranking | |
20 conditional density estimation | |
21 collaborative filtering | |
22 ordinal regression ?= ranking | |
23 | |
24 Unsupervised (x) | |
25 | |
26 de-noising | |
27 feature learning ( transformation ) PCA, DAA | |
28 density estimation | |
29 inference | |
30 | |
31 Other | |
32 | |
33 generation (sampling) | |
34 structure learning ??? | |
35 | |
36 | |
37 Notes on metrics & statistics: | |
38 - some are applied to an example, others to a batch | |
39 - most statistics are on the dataset | |
40 ''' | |
41 | |
42 | |
43 class Learner(object): | |
44 ''' | |
45 Takes data as inputs, and learns a prediction function (or several). | |
46 | |
47 A learner is parametrized by hyper-parameters, which can be set from the | |
48 outside (a "client" of Learner, which can be a HyperLearner, a | |
49 Tester,...). | |
50 | |
51 The data can be given all at once as a data set, or incrementally. | |
52 Some learners need to be fully trained in one step, whereas others can be | |
53 trained incrementally. | |
54 | |
55 The question of statistics collection during training remains open. | |
56 ''' | |
57 #def use_dataset(dataset) | |
58 | |
59 # return a dictionary of hyper-parameter names (keys) | |
60 # and values (values) | |
61 def get_hyper_parameters() | |
62 def set_hyper_parameters(dictionary) | |
63 | |
64 | |
65 | |
66 | |
67 # Ver B | |
68 def eval(dataset) | |
69 def predict(dataset) | |
70 | |
71 # Trainable | |
72 def train(dataset) # train until completion | |
73 | |
74 # Incremental | |
75 def use_dataset(dataset) | |
76 def adapt(n_steps =1) | |
77 def has_converged() | |
78 | |
79 # | |
80 | |
81 | |
82 # Some example cases | |
83 | |
84 class HyperLearner(Learner): | |
85 | |
86 ### def get_hyper_parameter_distribution(name) | |
87 def set_hyper_parameters_distribution(dictionary) | |
88 | |
89 | |
90 def bagging(learner_factory): | |
91 for i in range(N): | |
92 learner_i = learner_factory.new() | |
93 # todo: get dataset_i ?? | |
94 learner_i.use_dataset(dataset_i) | |
95 learner_i.train() |