# HG changeset patch
# User SylvainPL
# Date 1271254653 14400
# Node ID c2331b8e4b895b720a7b7e9c17a64afec7c99d1e
# Parent  18dc860a4ef4fd8cd915c1c711a3fc705525810c
Added a line that shrinks the finetune_lr for NIST when we have P07+NIST

diff -r 18dc860a4ef4 -r c2331b8e4b89 deep/stacked_dae/v_sylvain/sgd_optimization.py
--- a/deep/stacked_dae/v_sylvain/sgd_optimization.py	Sun Apr 11 19:52:52 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/sgd_optimization.py	Wed Apr 14 10:17:33 2010 -0400
@@ -202,7 +202,11 @@
         total_mb_index = 0
         minibatch_index = 0
         parameters_finetune=[]
-        learning_rate = self.hp.finetuning_lr  #The initial finetune lr
+
+        if ind_test == 21:
+            learning_rate = self.hp.finetuning_lr / 10.0
+        else:
+            learning_rate = self.hp.finetuning_lr  #The initial finetune lr
 
         while (epoch < num_finetune) and (not done_looping):
@@ -320,10 +324,11 @@
             pickle.dump(parameters_finetune,f)
             f.close()
-        elif ind_test== 0:  #To keep a track of the value of the parameters
+        elif ind_test == 0 or ind_test == 20:  #To keep a track of the value of the parameters
             f = open('params_finetune_P07.txt', 'w')
             pickle.dump(parameters_finetune,f)
             f.close()
+
         elif ind_test== 1:  #For the run with 2 finetunes. It will be faster.
             f = open('params_finetune_NIST.txt', 'w')
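
For context, a minimal standalone sketch of the learning-rate selection this changeset introduces; it is not code from the repository. The helper name select_finetune_lr and the sample values are hypothetical, and the reading of ind_test == 21 as "NIST fine-tuning pass of a P07+NIST run" is an assumption inferred from the commit message.

# Illustrative sketch only -- not part of the patched file.
# select_finetune_lr and the sample values are hypothetical.
def select_finetune_lr(finetuning_lr, ind_test):
    """Return the initial fine-tuning learning rate for a given run type."""
    if ind_test == 21:
        # Assumed meaning: NIST fine-tuning after P07 (a P07+NIST run),
        # which this changeset starts with a 10x smaller learning rate.
        return finetuning_lr / 10.0
    # Every other run keeps the configured fine-tuning learning rate.
    return finetuning_lr

if __name__ == '__main__':
    base_lr = 0.1  # hypothetical value of self.hp.finetuning_lr
    for ind_test in (0, 1, 20, 21):
        print(ind_test, select_finetune_lr(base_lr, ind_test))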