# HG changeset patch
# User sylvainpl
# Date 1271356860 14400
# Node ID a79db7cee0355e25f370b08836e5d0512056dd50
# Parent  5ddb1878dfbcbd6bddbd17734abedc540413bdeb
Arrange to get a decent decreasing learning rate for NIST

diff -r 5ddb1878dfbc -r a79db7cee035 deep/stacked_dae/v_sylvain/sgd_optimization.py
--- a/deep/stacked_dae/v_sylvain/sgd_optimization.py	Thu Apr 15 12:53:03 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/sgd_optimization.py	Thu Apr 15 14:41:00 2010 -0400
@@ -204,7 +204,7 @@
 
         parameters_finetune=[]
 
         if ind_test == 21:
-            learning_rate = self.hp.finetuning_lr / 10.0
+            learning_rate = self.hp.finetuning_lr / 5.0
         else:
             learning_rate = self.hp.finetuning_lr  #The initial finetune lr
@@ -295,7 +295,8 @@
                         break
 
             if decrease == 1:
-                learning_rate /= 2  #divide the learning rate by 2 for each new epoch
+                if (ind_test == 21 and epoch % 100 == 0) or ind_test == 20:
+                    learning_rate /= 2  #divide the learning rate by 2 for each new epoch of P07 (or every 100 epochs for NIST)
 
             self.series['params'].append((epoch,), self.classifier.all_params)
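
For reference, a minimal standalone sketch of the decay schedule this patch produces. It assumes ind_test == 21 denotes the NIST run and ind_test == 20 the P07 run (consistent with the comment in the diff), uses a made-up starting value for finetuning_lr, and ignores the surrounding `decrease` flag, which in the real code gates whether any decay happens at all:

# Sketch of the learning-rate schedule implied by the patch above.
# Assumptions not taken from the patch itself: ind_test == 21 is NIST,
# ind_test == 20 is P07, finetuning_lr = 0.1 is a hypothetical value,
# and the `decrease` flag from the real code is assumed to be 1 throughout.

def schedule(finetuning_lr, ind_test, n_epochs):
    """Yield (epoch, learning_rate) pairs under the patched logic."""
    if ind_test == 21:
        learning_rate = finetuning_lr / 5.0   # NIST now starts at a fifth of the base lr
    else:
        learning_rate = finetuning_lr         # the initial finetune lr
    for epoch in range(1, n_epochs + 1):
        yield epoch, learning_rate
        # Decay after the epoch: every epoch for P07, every 100 epochs for NIST.
        if (ind_test == 21 and epoch % 100 == 0) or ind_test == 20:
            learning_rate /= 2

for epoch, lr in schedule(0.1, ind_test=21, n_epochs=300):
    if epoch % 100 == 0:
        print(epoch, lr)   # 100 0.02, 200 0.01, 300 0.005

Note that the original added line used the bitwise operators & and |, which bind tighter than == in Python and so do not test the intended condition; the diff above uses the boolean operators and/or instead.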