comparison deep/stacked_dae/v_sylvain/sgd_optimization.py @ 336:a79db7cee035

Set up a decent decreasing learning rate for NIST
author sylvainpl
date Thu, 15 Apr 2010 14:41:00 -0400
parents c2331b8e4b89
children 625c0c3fcbdb
comparison of 335:5ddb1878dfbc with 336:a79db7cee035
@@ -202,11 +202,11 @@
         total_mb_index = 0
         minibatch_index = 0
         parameters_finetune=[]
 
         if ind_test == 21:
-            learning_rate = self.hp.finetuning_lr / 10.0
+            learning_rate = self.hp.finetuning_lr / 5.0
         else:
             learning_rate = self.hp.finetuning_lr #The initial finetune lr
 
 
         while (epoch < num_finetune) and (not done_looping):
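
The first hunk only changes the starting point of the schedule: when ind_test selects the NIST fine-tuning run (21 in this script), the initial rate is now the base finetuning_lr divided by 5 instead of 10. A minimal sketch of that selection, assuming an illustrative base rate of 0.1 (the real value comes from the experiment's hyperparameters, self.hp):

    # Sketch of the initial-rate selection above; 0.1 is an illustrative base
    # rate, the real value comes from the hyperparameters (self.hp.finetuning_lr).
    def initial_learning_rate(ind_test, finetuning_lr=0.1):
        if ind_test == 21:                 # NIST fine-tuning branch
            return finetuning_lr / 5.0     # was / 10.0 before this changeset
        return finetuning_lr               # other branches keep the base rate

    print(initial_learning_rate(21))       # 0.02
    print(initial_learning_rate(20))       # 0.1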
@@ -293,11 +293,12 @@
                 # useful when doing tests
                 if self.max_minibatches and minibatch_index >= self.max_minibatches:
                     break
 
             if decrease == 1:
-                learning_rate /= 2 #divide the learning rate by 2 for each new epoch
+                if (ind_test == 21 & epoch % 100 == 0) | ind_test == 20:
+                    learning_rate /= 2 #divide the learning rate by 2 for each new epoch of P07 (or 100 of NIST)
 
             self.series['params'].append((epoch,), self.classifier.all_params)
 
             if done_looping == True: #To exit completly the fine-tuning
                 break #to exit the WHILE loop
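
The second hunk replaces the unconditional halving with a dataset-dependent rule. Note that the new test uses the bitwise operators & and |, which bind tighter than == in Python, so as written the condition reduces to ind_test == 20; spelled out with and/or, the rule described by the inline comment is: halve the rate on every P07 epoch (ind_test == 20), but only every 100 epochs on NIST (ind_test == 21). A self-contained sketch of that intended schedule, with names mirroring the script and an illustrative 300-epoch loop:

    # Sketch of the intended decay rule, with the condition spelled out using
    # boolean operators; decrease, ind_test and epoch mirror the script's names.
    def decay(learning_rate, epoch, ind_test, decrease=1):
        if decrease == 1:
            # halve every 100 NIST epochs, or on every P07 epoch
            if (ind_test == 21 and epoch % 100 == 0) or ind_test == 20:
                learning_rate /= 2
        return learning_rate

    # Illustrative run: on NIST (ind_test == 21) the rate halves at epochs
    # 100, 200, 300, ...; on P07 (ind_test == 20) it halves every epoch.
    lr = 0.02
    for epoch in range(1, 301):
        lr = decay(lr, epoch, ind_test=21)
    print(lr)                              # 0.02 / 2**3 = 0.0025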