diff deep/stacked_dae/v_sylvain/sgd_optimization.py @ 331:c2331b8e4b89
Added a line that shrinks the finetune_lr for NIST when running P07+NIST
| author   | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca> |
| -------- | ------------------------------------------------- |
| date     | Wed, 14 Apr 2010 10:17:33 -0400 |
| parents  | 18dc860a4ef4 |
| children | a79db7cee035 |
```diff
--- a/deep/stacked_dae/v_sylvain/sgd_optimization.py	Sun Apr 11 19:52:52 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/sgd_optimization.py	Wed Apr 14 10:17:33 2010 -0400
@@ -202,7 +202,11 @@
         total_mb_index = 0
         minibatch_index = 0
         parameters_finetune=[]
-        learning_rate = self.hp.finetuning_lr  #The initial finetune lr
+
+        if ind_test == 21:
+            learning_rate = self.hp.finetuning_lr / 10.0
+        else:
+            learning_rate = self.hp.finetuning_lr  #The initial finetune lr
 
         while (epoch < num_finetune) and (not done_looping):
 
@@ -320,10 +324,11 @@
                 pickle.dump(parameters_finetune,f)
                 f.close()
 
-            elif ind_test== 0:    #To keep a track of the value of the parameters
+            elif ind_test == 0 | ind_test == 20:    #To keep a track of the value of the parameters
                 f = open('params_finetune_P07.txt', 'w')
                 pickle.dump(parameters_finetune,f)
                 f.close()
+
             elif ind_test== 1:    #For the run with 2 finetunes. It will be faster.
                 f = open('params_finetune_NIST.txt', 'w')
 
```
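The hunks above make two behavioral changes: the P07+NIST run (flagged by `ind_test == 21`) starts finetuning from a learning rate ten times smaller than `self.hp.finetuning_lr`, and the parameter-dump branch is widened to a second flag value. Note a pitfall in the committed condition `elif ind_test == 0 | ind_test == 20:`: in Python, `|` binds tighter than `==`, so the expression parses as the chained comparison `ind_test == (0 | ind_test) == 20` and matches only `ind_test == 20`, silently dropping the `ind_test == 0` case; the intended test was presumably `ind_test == 0 or ind_test == 20`. A minimal standalone sketch of both points follows; `pick_finetune_lr` and `is_p07_branch` are hypothetical helper names introduced here for illustration, while `ind_test`, `finetuning_lr`, and the constants 0/20/21 come from the diff itself.

```python
# Illustration only: standalone restatement of the two edits in this commit.
# pick_finetune_lr / is_p07_branch are hypothetical names; the constants and
# the ind_test convention are taken from the diff above.

def pick_finetune_lr(ind_test, finetuning_lr):
    """Initial finetuning learning rate.

    The commit divides the configured rate by 10 for the P07+NIST run
    (flagged by ind_test == 21) before finetuning on NIST.
    """
    if ind_test == 21:
        return finetuning_lr / 10.0
    return finetuning_lr


def is_p07_branch(ind_test):
    """What `ind_test == 0 | ind_test == 20` presumably meant."""
    return ind_test in (0, 20)


if __name__ == "__main__":
    assert pick_finetune_lr(21, 1.0) == 0.1   # P07+NIST run: lr shrunk 10x
    assert pick_finetune_lr(0, 1.0) == 1.0    # other runs: lr unchanged

    # The precedence pitfall: `|` binds tighter than `==`, so the committed
    # condition is the chained comparison ind_test == (0 | ind_test) == 20.
    ind_test = 0
    assert not (ind_test == 0 | ind_test == 20)  # buggy form misses 0
    assert is_p07_branch(ind_test)               # intended behavior
```

In practice the idiomatic spelling `ind_test in (0, 20)` avoids the precedence trap entirely and reads closer to the comment's intent.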