pylearn: diff learner.py @ 180:2698c0feeb54
mlp seems to work!
| author   | Yoshua Bengio <bengioy@iro.umontreal.ca> |
|----------|------------------------------------------|
| date     | Tue, 13 May 2008 15:35:43 -0400          |
| parents  | 69759976b3ac                             |
| children | cb6b945acf5a                             |
--- a/learner.py	Tue May 13 15:14:04 2008 -0400
+++ b/learner.py	Tue May 13 15:35:43 2008 -0400
@@ -4,7 +4,9 @@
 import theano
 from theano import compile
 from theano import tensor as t
-
+from misc import Print
+Print = lambda x: lambda y: y
+
 class Learner(AttributesHolder):
     """
     Base class for learning algorithms, provides an interface
@@ -188,7 +190,7 @@
     stats collectors.
     All these attributes are expected to be theano.Result objects
     (with a .data property and recognized by
-    theano.Function for compilation). The sub-class
+    theano.function for compilation). The sub-class
     constructor defines the relations between the Theano
     variables that may be used by 'use' and 'update'
     or by a stats collector.
@@ -210,9 +212,10 @@
     dependant de Theano
     """
 
-    def __init__(self):
+    def __init__(self,linker="c|py"):
         Learner.__init__(self)
         self.use_functions_dictionary={}
+        self.linker=linker
 
     def defaultOutputFields(self, input_fields):
         """
@@ -238,7 +241,8 @@
             use_input_attributes = self.useInputAttributes()
             use_output_attributes = self.useOutputAttributes()
             complete_f = compile.function(self.names2OpResults(input_fields+use_input_attributes),
-                                          self.names2OpResults(output_fields+use_output_attributes))
+                                          self.names2OpResults(output_fields+use_output_attributes),
+                                          self.linker)
             def f(*input_field_values):
                 input_attribute_values = self.names2attributes(use_input_attributes)
                 results = complete_f(*(list(input_field_values) + input_attribute_values))
@@ -276,13 +280,15 @@
 
     """
 
-    def __init__(self):
-        TLearner.__init__(self)
+    def __init__(self,linker="c|py"):
+        TLearner.__init__(self,linker)
         self.update_minibatch_function = compile.function(self.names2OpResults(self.updateMinibatchInputAttributes()+
                                                                                self.updateMinibatchInputFields()),
-                                                          self.names2OpResults(self.updateMinibatchOutputAttributes()))
+                                                          self.names2OpResults(self.updateMinibatchOutputAttributes()),
+                                                          linker)
         self.update_end_function = compile.function(self.names2OpResults(self.updateEndInputAttributes()),
-                                                    self.names2OpResults(self.updateEndOutputAttributes()))
+                                                    self.names2OpResults(self.updateEndOutputAttributes()),
+                                                    linker)
 
     def allocate(self, minibatch):
         """
@@ -369,7 +375,7 @@
     - self._learning_rate (may be changed by the sub-class between epochs or minibatches)
     - self.lossAttribute() = name of the loss field
     """
-    def __init__(self,truly_online=False):
+    def __init__(self,truly_online=False,linker="c|py"):
         """
         If truly_online then only one pass is made through the training set passed to update().
 
@@ -382,9 +388,9 @@
         new_params_names = ["_new_"+name for name in self.parameterAttributes()]
         loss = self.__getattribute__("_"+self.lossAttribute())
         self.setAttributes(new_params_names,
-                           [t.add_inplace(param,self._learning_rate*t.grad(loss,param))
+                           [t.add_inplace(param,-self._learning_rate*Print("grad("+param.name+")")(t.grad(loss,param)))
                             for param in old_params])
-        MinibatchUpdatesTLearner.__init__(self)
+        MinibatchUpdatesTLearner.__init__(self,linker)
 
     def namesOfAttributesToComputeOutputs(self,output_names):
@@ -408,7 +414,7 @@
         return self.truly_online
 
     def updateMinibatchInputAttributes(self):
-        return self.parameterAttributes()
+        return self.parameterAttributes()+["learning_rate"]
 
     def updateMinibatchOutputAttributes(self):
         return ["new_"+name for name in self.parameterAttributes()]
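The change that most likely explains "mlp seems to work!" is the sign flip in the parameter update (hunk @@ -382,9 +388,9 @@): the old code computed `t.add_inplace(param, self._learning_rate*t.grad(loss,param))`, adding the gradient and therefore *climbing* the loss, while the new code negates the step, giving proper gradient descent. A minimal sketch of why the sign matters (plain Python with a toy quadratic loss; none of these names come from pylearn):

```python
# Toy loss: (w - 3)**2, minimized at w = 3.
def grad(w):
    # d/dw (w - 3)**2
    return 2.0 * (w - 3.0)

learning_rate = 0.1
w_old, w_new = 0.0, 0.0
for _ in range(50):
    w_old += learning_rate * grad(w_old)    # old rule: gradient ascent, moves away from the minimum
    w_new -= learning_rate * grad(w_new)    # fixed rule: gradient descent, converges

print(w_old)  # diverges: |w - 3| grows by a factor 1.2 per step
print(w_new)  # approximately 3.0
```

Two smaller changes are worth noting. First, `from misc import Print` followed immediately by `Print = lambda x: lambda y: y` shadows the imported debugging op with a no-op identity wrapper, so the `Print("grad(...)")(...)` call sites stay in place but print nothing until the lambda is deleted. Second, a `linker="c|py"` argument is threaded through each constructor down to `compile.function`; in Theano, "c|py" names the linker that uses C implementations where available and falls back to Python.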