Mercurial > ift6266
comparison deep/stacked_dae/v_sylvain/nist_sda.py @ 386:8875853b5bfc
Ajout d'une option finetune amelioree pour PNIST07
author | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca> |
---|---|
date | Tue, 27 Apr 2010 08:41:44 -0400 |
parents | 87e684bfe538 |
children | 7c201ca1484f |
comparison
equal
deleted
inserted
replaced
385:442fc117e886 | 386:8875853b5bfc |
---|---|
101 sys.exit("The code does not support this much pretraining epoch (99 max with P07).\n"+ | 101 sys.exit("The code does not support this much pretraining epoch (99 max with P07).\n"+ |
102 "You have to correct the code (and be patient, P07 is huge !!)\n"+ | 102 "You have to correct the code (and be patient, P07 is huge !!)\n"+ |
103 "or reduce the number of pretraining epoch to run the code (better idea).\n") | 103 "or reduce the number of pretraining epoch to run the code (better idea).\n") |
104 print('\n\tpretraining with P07') | 104 print('\n\tpretraining with P07') |
105 optimizer.pretrain(datasets.nist_P07(min_file=0,max_file=nb_file),decrease = dec) | 105 optimizer.pretrain(datasets.nist_P07(min_file=0,max_file=nb_file),decrease = dec) |
106 elif state['pretrain_choice'] == 2: | |
107 nb_file = int(state['pretraining_epochs_per_layer']) | |
108 state['pretraining_epochs_per_layer'] = 1 #Only 1 time over the dataset | |
109 if nb_file >=100: | |
110 sys.exit("The code does not support this much pretraining epoch (99 max with P07).\n"+ | |
111 "You have to correct the code (and be patient, P07 is huge !!)\n"+ | |
112 "or reduce the number of pretraining epoch to run the code (better idea).\n") | |
113 print('\n\tpretraining with PNIST07') | |
114 optimizer.pretrain(datasets.PNIST07(min_file=0,max_file=nb_file),decrease = dec) | |
115 | |
106 channel.save() | 116 channel.save() |
107 | 117 |
108 #Set some of the parameters used for the finetuning | 118 #Set some of the parameters used for the finetuning |
109 if state.has_key('finetune_set'): | 119 if state.has_key('finetune_set'): |
110 finetune_choice=state['finetune_set'] | 120 finetune_choice=state['finetune_set'] |
143 print('\n\n\tfinetune with NIST only on the logistic regression on top (but validation on P07).\n\ | 153 print('\n\n\tfinetune with NIST only on the logistic regression on top (but validation on P07).\n\ |
144 All hidden units output are input of the logistic regression\n\n') | 154 All hidden units output are input of the logistic regression\n\n') |
145 optimizer.reload_parameters('params_pretrain.txt') | 155 optimizer.reload_parameters('params_pretrain.txt') |
146 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr) | 156 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr) |
147 if finetune_choice == 4: | 157 if finetune_choice == 4: |
148 print ('\n\n\tFinetune with PNIST07\n\n') | 158 print ('\n\n\tFinetune with PNIST07 then NIST\n\n') |
149 optimizer.reload_parameters('params_pretrain.txt') | 159 optimizer.reload_parameters('params_pretrain.txt') |
150 optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=2,decrease=decrease_lr,dataset_test2=datasets.nist_P07()) | 160 optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=30,decrease=decrease_lr,dataset_test2=datasets.nist_P07()) |
161 optimizer.finetune(datasets.nist_all(),datasets.PNIST07(),max_finetune_epoch_NIST,ind_test=31,decrease=decrease_lr,dataset_test2=datasets.nist_P07()) | |
151 | 162 |
152 if finetune_choice==-1: | 163 if finetune_choice==-1: |
153 print('\nSERIE OF 4 DIFFERENT FINETUNINGS') | 164 print('\nSERIE OF 4 DIFFERENT FINETUNINGS') |
154 print('\n\n\tfinetune with NIST\n\n') | 165 print('\n\n\tfinetune with NIST\n\n') |
155 sys.stdout.flush() | 166 sys.stdout.flush() |
174 channel.save() | 185 channel.save() |
175 print ('\n\n\tFinetune with PNIST07\n\n') | 186 print ('\n\n\tFinetune with PNIST07\n\n') |
176 sys.stdout.flush() | 187 sys.stdout.flush() |
177 optimizer.reload_parameters('params_pretrain.txt') | 188 optimizer.reload_parameters('params_pretrain.txt') |
178 optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=2,decrease=decrease_lr,dataset_test2=datasets.nist_P07()) | 189 optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=2,decrease=decrease_lr,dataset_test2=datasets.nist_P07()) |
190 channel.save() | |
191 sys.stdout.flush() | |
192 optimizer.finetune(datasets.nist_all(),datasets.PNIST07(),max_finetune_epoch_NIST,ind_test=31,decrease=decrease_lr,dataset_test2=datasets.nist_P07()) | |
179 | 193 |
180 | 194 |
181 channel.save() | 195 channel.save() |
182 | 196 |
183 return channel.COMPLETE | 197 return channel.COMPLETE |