diff deep/stacked_dae/v_sylvain/sgd_optimization.py @ 330:18dc860a4ef4
Add the ability to decrease the learning rate on request
| author | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca> |
|---|---|
| date | Sun, 11 Apr 2010 19:52:52 -0400 |
| parents | 048898c1ee55 |
| children | c2331b8e4b89 |
```diff
--- a/deep/stacked_dae/v_sylvain/sgd_optimization.py	Sun Apr 11 19:52:44 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/sgd_optimization.py	Sun Apr 11 19:52:52 2010 -0400
@@ -145,7 +145,7 @@
         f.close()
 
-    def finetune(self,dataset,dataset_test,num_finetune,ind_test,special=0):
+    def finetune(self,dataset,dataset_test,num_finetune,ind_test,special=0,decrease=0):
 
         if special != 0 and special != 1:
             sys.exit('Bad value for variable special. Must be in {0,1}')
@@ -200,16 +200,20 @@
 
         epoch = 0
         total_mb_index = 0
-        minibatch_index = -1
+        minibatch_index = 0
         parameters_finetune=[]
 
+        learning_rate = self.hp.finetuning_lr  #The initial finetune lr
+
         while (epoch < num_finetune) and (not done_looping):
             epoch = epoch + 1
 
             for x,y in dataset.train(minibatch_size,bufsize=buffersize):
                 minibatch_index += 1
+
+
                 if special == 0:
-                    cost_ij = self.classifier.finetune(x,y)
+                    cost_ij = self.classifier.finetune(x,y,learning_rate)
                 elif special == 1:
                     cost_ij = self.classifier.finetune2(x,y)
                 total_mb_index += 1
@@ -285,7 +289,10 @@
                 # useful when doing tests
                 if self.max_minibatches and minibatch_index >= self.max_minibatches:
                     break
-
+
+            if decrease == 1:
+                learning_rate /= 2  #divide the learning rate by 2 for each new epoch
+
             self.series['params'].append((epoch,), self.classifier.all_params)
 
             if done_looping == True:    #To exit completly the fine-tuning
@@ -341,21 +348,19 @@
                 self.classifier.params[idx].value=theano._asarray(copy(x),dtype=theano.config.floatX)
             else:
                 self.classifier.params[idx].value=copy(x)
-
-    #Calculate error over the training set (or a part of)
-    def training_error(self,data):
+
+    def training_error(self,dataset):
         # create a function to compute the mistakes that are made by the model
         # on the validation set, or testing set
         test_model = \
            theano.function(
                [self.classifier.x,self.classifier.y], self.classifier.errors)
-        iter2 = data.train(self.hp.minibatch_size,bufsize=buffersize)
+        iter2 = dataset.train(self.hp.minibatch_size,bufsize=buffersize)
         train_losses2 = [test_model(x,y) for x,y in iter2]
         train_score2 = numpy.mean(train_losses2)
         print "Training error is: " + str(train_score2)
-
```
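
In short, the patch threads a `learning_rate` variable through fine-tuning: it starts at `self.hp.finetuning_lr`, is passed to `self.classifier.finetune(x,y,learning_rate)` on every minibatch, and, when the new `decrease` flag is 1, is divided by 2 at the end of each epoch. Below is a minimal standalone sketch of that schedule in isolation; the helper name `finetune_lr_schedule` and its parameters are illustrative, not from the repository.

```python
# A minimal sketch of the per-epoch decay schedule this patch adds to
# finetune(). Names (finetune_lr_schedule, initial_lr, num_epochs) are
# illustrative, not from the repository.

def finetune_lr_schedule(initial_lr, num_epochs, decrease=0):
    """Yield the learning rate used for each epoch.

    decrease=0 keeps the rate constant (the old behaviour);
    decrease=1 halves it after every epoch, as the patch does.
    """
    learning_rate = initial_lr
    for epoch in range(num_epochs):
        yield learning_rate
        if decrease == 1:
            learning_rate /= 2.  # halve the rate for the next epoch

# Over four epochs with decrease=1: 0.1, 0.05, 0.025, 0.0125
print(list(finetune_lr_schedule(0.1, 4, decrease=1)))
```

Since `decrease` defaults to 0 and the constant `finetuning_lr` is still used as the starting value, existing callers of `finetune()` keep their old behaviour unchanged.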