pylearn: comparison of gradient_learner.py @ 26:672fe4b23032
Fixed dataset errors so that _test_dataset.py works again.
author | bengioy@grenat.iro.umontreal.ca |
---|---|
date | Fri, 11 Apr 2008 11:14:54 -0400 |
parents | 526e192b0699 |
children | 46c5c90019c2 |
Changes from parent 23:526e192b0699 to 26:672fe4b23032, shown as a unified diff:
```diff
@@ -24,11 +24,11 @@
     The learned function can map a subset of inputs to a subset of outputs (as long as the inputs subset
     includes all the inputs required in the Theano expression for the selected outputs).
     It is assumed that all the inputs are provided in the training set (as dataset fields
     with the corresponding name), but not necessarily when using the learned function.
     """
-    def __init__(self, inputs, parameters, outputs, example_wise_cost, regularization_term,
+    def __init__(self, inputs, parameters, outputs, example_wise_cost, regularization_term=astensor(0.0),
                  regularization_coefficient = astensor(1.0)):
         self.inputs = inputs
         self.outputs = outputs
         self.parameters = parameters
         self.example_wise_cost = example_wise_cost
```
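The only substantive change above is the new default `regularization_term=astensor(0.0)`, which makes the penalty optional. A minimal numpy sketch, assuming (this is not pylearn code, just an illustration) that the learner's criterion is the sum of example-wise costs plus the weighted penalty:

```python
import numpy

# Illustrative sketch only, not part of the changeset: with the penalty
# defaulting to zero, the criterion reduces to the sum of per-example costs.
def total_cost(example_costs, regularization_term=0.0, regularization_coefficient=1.0):
    return numpy.sum(example_costs) + regularization_coefficient * regularization_term

print(total_cost([0.5, 1.25]))                           # 1.75 (no penalty)
print(total_cost([0.5, 1.25], regularization_term=2.0))  # 3.75
```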
```diff
@@ -46,15 +46,18 @@
                               : Function(inputs, outputs)}
 
     def use(self,input_dataset,output_fields=None,copy_inputs=True):
         # obtain the function that maps the desired inputs to desired outputs
         input_fields = input_dataset.fieldNames()
+        # map names of input fields to Theano tensors in self.inputs
+        input_variables = ???
         if output_fields is None: output_fields = [output.name for output in outputs]
         # handle special case of inputs that are directly copied into outputs
-
+        # map names of output fields to Theano tensors in self.outputs
+        output_variables = ???
         use_function_key = input_fields+output_fields
         if not self.use_functions.has_key(use_function_key):
-            self.use_function[use_function_key]=Function(input_fields,output_fields)
+            self.use_function[use_function_key]=Function(input_variables,output_variables)
         use_function = self.use_functions[use_function_key]
         # return a dataset that computes the outputs
         return input_dataset.applyFunction(use_function,input_fields,output_fields,copy_inputs,compute_now=True)
 
```
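The two `???` placeholders are left unresolved in this revision. Below is a minimal sketch of one way the mapping could be done, assuming each Theano variable in `self.inputs`/`self.outputs` has a `.name` attribute matching its dataset field; the helper `map_fields_to_variables` is hypothetical, not part of the changeset:

```python
# Hypothetical helper, not in the changeset: resolve dataset field names
# to the variables whose .name attributes match them.
def map_fields_to_variables(field_names, variables):
    by_name = dict((v.name, v) for v in variables)
    return [by_name[name] for name in field_names]

# Inside use(), the placeholders could then become:
#   input_variables = map_fields_to_variables(input_fields, self.inputs)
#   output_variables = map_fields_to_variables(output_fields, self.outputs)
```

Two pre-existing issues are untouched by this hunk: `use_function_key = input_fields+output_fields` concatenates two lists, which cannot serve as a dictionary key (a tuple would be hashable), and the cache is written through `self.use_function` but read through `self.use_functions`.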
```diff
@@ -60,0 +64,8 @@
+
+class StochasticGradientDescent(object):
+    def update_parameters(self):
+
+class StochasticGradientLearner(GradientLearner,StochasticGradientDescent):
+    def __init__(self,inputs, parameters, outputs, example_wise_cost, regularization_term=astensor(0.0),
+                 regularization_coefficient = astensor(1.0),)
+    def update()
```
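Both new classes are stubs in this revision: `update_parameters` has an empty body, and `StochasticGradientLearner.__init__`/`update` are not yet syntactically complete. For orientation, a minimal numpy sketch of the update such a class typically performs; the `learning_rate` and `gradients` names are assumptions, not part of the changeset:

```python
import numpy

class StochasticGradientDescentSketch(object):
    """Illustrative only; not the pylearn implementation."""
    def __init__(self, parameters, learning_rate=0.01):
        self.parameters = parameters        # list of numpy arrays
        self.learning_rate = learning_rate

    def update_parameters(self, gradients):
        # classic SGD step, in place: p <- p - learning_rate * dC/dp
        for p, g in zip(self.parameters, gradients):
            p -= self.learning_rate * numpy.asarray(g)
```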