# HG changeset patch
# User SylvainPL
# Date 1271029964 14400
# Node ID 54ad8a091783a04005d3ee0efd1963613712ecf5
# Parent  c61b72d07676cdce0a66b30b35590c2440d02a85
Added the ability to decrease the learning rate if requested

diff -r c61b72d07676 -r 54ad8a091783 deep/stacked_dae/v_sylvain/stacked_dae.py
--- a/deep/stacked_dae/v_sylvain/stacked_dae.py	Sun Apr 11 19:52:35 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/stacked_dae.py	Sun Apr 11 19:52:44 2010 -0400
@@ -203,6 +203,7 @@
         self.x = T.matrix('x')   # the data is presented as rasterized images
         self.y = T.ivector('y')  # the labels are presented as 1D vector of
                                  # [int] labels
+        self.finetune_lr = T.fscalar('finetune_lr') # to get a dynamic finetune learning rate

         for i in xrange( self.n_layers ):
             # construct the sigmoidal layer
@@ -275,9 +276,9 @@
         # compute list of updates
         updates = {}
         for param,gparam in zip(self.params, gparams):
-            updates[param] = param - gparam*finetune_lr
+            updates[param] = param - gparam*self.finetune_lr

-        self.finetune = theano.function([self.x,self.y], cost,
+        self.finetune = theano.function([self.x,self.y,self.finetune_lr], cost,
                 updates = updates)#,

         # symbolic variable that points to the number of errors made on the
@@ -302,15 +303,16 @@
         #self.logistic_params+= self.logLayer2.params

         # construct a function that implements one step of finetunining
-
+
+        self.logistic_params += self.logLayer2.params
         # compute the cost, defined as the negative log likelihood
         cost2 = self.logLayer2.negative_log_likelihood(self.y)
         # compute the gradients with respect to the model parameters
-        gparams2 = T.grad(cost2, self.logLayer2.params)
+        gparams2 = T.grad(cost2, self.logistic_params)

         # compute list of updates
         updates2 = {}
-        for param,gparam in zip(self.logLayer2.params, gparams2):
+        for param,gparam in zip(self.logistic_params, gparams2):
             updates2[param] = param - gparam*finetune_lr

         self.finetune2 = theano.function([self.x,self.y], cost2,
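
Note: a minimal usage sketch (not part of this changeset) of how the new
finetune_lr input could be fed a decaying value from the training loop; the
names sgd_model, train_batches and n_epochs, the decrease_lr flag and the
1/t decay schedule are assumptions for illustration only:

    import numpy

    initial_lr  = 0.1
    decrease_lr = True            # assumed flag, not defined in this diff
    n_epochs    = 30              # assumed number of finetuning epochs

    for epoch in xrange(n_epochs):
        # simple 1/t decay; the project's actual schedule is not shown here
        lr = initial_lr / (1.0 + epoch) if decrease_lr else initial_lr
        for x_batch, y_batch in train_batches:
            # finetune now takes the learning rate as a third input;
            # cast to float32 to match the T.fscalar declared in the patch
            cost = sgd_model.finetune(x_batch, y_batch, numpy.float32(lr))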