ift6266: comparison of deep/stacked_dae/v_sylvain/nist_sda.py @ 419:c91d7b67fa41
Fix a small error in the name of the pretraining parameter files
author: SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date: Fri, 30 Apr 2010 14:48:08 -0400
parents: 7c201ca1484f
children: (none)
Comparison of 418:fb028b37ce92 (before) and 419:c91d7b67fa41 (after); changed lines are marked with - (old) and + (new):

  156  All hidden units output are input of the logistic regression\n\n')
  157  optimizer.reload_parameters('params_pretrain.txt')
  158  optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr)
  159  if finetune_choice == 4:
  160  print ('\n\n\tFinetune with PNIST07 then NIST\n\n')
- 161  optimizer.reload_parameters('params)pretrain.txt')
+ 161  optimizer.reload_parameters('params_pretrain.txt')
  162  optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=30,decrease=decrease_lr,dataset_test2=datasets.nist_P07())
  163  optimizer.finetune(datasets.nist_all(),datasets.PNIST07(),max_finetune_epoch_NIST,ind_test=31,decrease=decrease_lr,dataset_test2=datasets.nist_P07())
  164
  165  if finetune_choice==-1:
  166  print('\nSERIE OF 4 DIFFERENT FINETUNINGS')
  ...
  185  optimizer.reload_parameters('params_pretrain.txt')
  186  optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr)
  187  channel.save()
  188  print ('\n\n\tFinetune with PNIST07\n\n')
  189  sys.stdout.flush()
- 190  optimizer.reload_parameters('params)pretrain.txt')
+ 190  optimizer.reload_parameters('params_pretrain.txt')
  191  optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=2,decrease=decrease_lr,dataset_test2=datasets.nist_P07())
  192  channel.save()
  193  sys.stdout.flush()
  194  optimizer.finetune(datasets.nist_all(),datasets.PNIST07(),max_finetune_epoch_NIST,ind_test=31,decrease=decrease_lr,dataset_test2=datasets.nist_P07())
  195
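For context, the corrected calls are the ones that restore the pretrained weights before each finetuning pass: with the mistyped filename 'params)pretrain.txt', reload_parameters would be pointed at a file that presumably does not exist, so those passes would not start from the saved pretraining parameters. Below is a minimal, hypothetical sketch of that reload-then-finetune pattern. The wrapper function and the explicit existence check are assumptions added for illustration and are not part of the changeset; optimizer.reload_parameters, optimizer.finetune, and the dataset and constant names come from the code shown above.

```python
import os

PRETRAIN_PARAMS = 'params_pretrain.txt'  # corrected filename from this changeset

def reload_pretrained(optimizer, filename=PRETRAIN_PARAMS):
    # Hypothetical helper: fail loudly if the pretrained parameter file is
    # missing, instead of letting a later finetuning pass continue from
    # whatever weights the previous pass left in memory.
    if not os.path.exists(filename):
        raise IOError('pretrained parameter file not found: %s' % filename)
    optimizer.reload_parameters(filename)

# Intended use, mirroring the corrected lines 161 and 190:
#   reload_pretrained(optimizer)
#   optimizer.finetune(datasets.PNIST07(), datasets.nist_all(),
#                      max_finetune_epoch_NIST, ind_test=30,
#                      decrease=decrease_lr,
#                      dataset_test2=datasets.nist_P07())
```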