Mercurial > pylearn

annotate sandbox/gradient_learner.py @ 466:23221eefb70e
"Added pylearn.sandbox.weights.random_weights"

author   | Joseph Turian <turian@iro.umontreal.ca>
date     | Wed, 15 Oct 2008 18:59:55 -0400
parents  | d7611a3811f2
children |

line source:
from learner import *
from tensor import *
import gradient
from compile import Function
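# Note: 'learner', 'tensor', 'gradient', and 'compile' appear to be pylearn / early
# Theano module names as they existed at this 2008 revision, not standard-library modules.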

class GradientLearner(Learner):
    """
    Base class for gradient-based optimization of a training criterion
    that can consist of two parts: an additive part over examples, and
    an example-independent part (usually called the regularizer).

    The user provides a Theano formula that maps the fields of a minibatch (each being a
    tensor with the same number of rows = minibatch size) and parameters to output fields
    (for the use function), one of which must be a cost that is the training criterion to
    be minimized. Subclasses implement a training strategy that uses the Theano formula
    to compute gradients and to compute outputs in the update method.

    The inputs, parameters, and outputs are lists of Theano tensors, while the
    example_wise_cost and regularization_term are Theano tensors. The user can specify a
    regularization coefficient that multiplies the regularization term. The training
    algorithm looks for parameters that minimize

        regularization_coefficient * regularization_term(parameters) +
        sum_{inputs in training_set} example_wise_cost(inputs, parameters)

    i.e. the regularization_term should not depend on the inputs, only on the parameters.

    The learned function can map a subset of the inputs to a subset of the outputs (as
    long as the input subset includes all the inputs required in the Theano expression
    for the selected outputs). It is assumed that all the inputs are provided in the
    training set (as dataset fields with the corresponding names), but not necessarily
    when using the learned function.
    """
    def __init__(self, inputs, parameters, outputs, example_wise_cost,
                 regularization_term=astensor(0.0),
                 regularization_coefficient=astensor(1.0)):
        self.inputs = inputs
        self.outputs = outputs
        self.parameters = parameters
        self.example_wise_cost = example_wise_cost
        self.regularization_term = regularization_term
        self.regularization_coefficient = regularization_coefficient
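        # Symbolic gradients of the per-example cost and of the (scaled) regularization
        # term with respect to each parameter tensor.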
        self.parameters_example_wise_gradient = gradient.grad(example_wise_cost, parameters)
        self.parameters_regularization_gradient = gradient.grad(
            self.regularization_coefficient * regularization_term, parameters)
        if example_wise_cost not in outputs:
            outputs.append(example_wise_cost)
        if regularization_term not in outputs:
            outputs.append(regularization_term)
        self.example_wise_gradient_fn = Function(
            inputs + parameters,
            [self.parameters_example_wise_gradient + self.parameters_regularization_gradient])
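        # Cache of compiled "use" functions, keyed by the frozenset of the input and
        # output field names that each function maps between.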
        self.use_functions = {frozenset([input.name for input in inputs] +
                                        [output.name for output in outputs]):
                              Function(inputs, outputs)}

    def use(self, input_dataset, output_fields=None, copy_inputs=True):
        # obtain the function that maps the desired inputs to the desired outputs
        input_fields = input_dataset.fieldNames()
        # map names of input fields to Theano tensors in self.inputs
        input_variables = ???
        if output_fields is None:
            output_fields = [output.name for output in self.outputs]
        # handle the special case of inputs that are directly copied into outputs
        # map names of output fields to Theano tensors in self.outputs
        output_variables = ???
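        # The two '???' assignments above are left unfinished at this revision. A
        # plausible completion (hypothetical, not in the original) would select the
        # declared tensors whose names match the requested field names, e.g.
        #     input_variables  = [v for v in self.inputs  if v.name in input_fields]
        #     output_variables = [v for v in self.outputs if v.name in output_fields]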
        use_function_key = frozenset(input_fields + output_fields)
        if not self.use_functions.has_key(use_function_key):
            self.use_functions[use_function_key] = Function(input_variables, output_variables)
        use_function = self.use_functions[use_function_key]
        # return a dataset that computes the outputs
        return input_dataset.apply_function(use_function, input_fields, output_fields,
                                            copy_inputs, compute_now=True)
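# The classes below are work-in-progress stubs at this revision; their method bodies
# are not yet implemented.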
class StochasticGradientDescent(object):
    def update_parameters(self):
        pass  # not yet implemented at this revision


class StochasticGradientLearner(GradientLearner, StochasticGradientDescent):
    def __init__(self, inputs, parameters, outputs, example_wise_cost,
                 regularization_term=astensor(0.0),
                 regularization_coefficient=astensor(1.0)):
        pass  # not yet implemented at this revision

    def update(self):
        pass  # not yet implemented at this revision