ift6266: changeset 382:87e684bfe538
Add the ability to use PNIST07
author   | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date     | Tue, 27 Apr 2010 08:18:10 -0400
parents  | 0a91fc69ff90
children | 5c3935aa3f8a
files    | deep/stacked_dae/v_sylvain/nist_sda.py
diffstat | 1 files changed, 9 insertions(+), 1 deletions(-)
--- a/deep/stacked_dae/v_sylvain/nist_sda.py	Mon Apr 26 22:36:07 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/nist_sda.py	Tue Apr 27 08:18:10 2010 -0400
@@ -144,7 +144,10 @@
         All hidden units output are input of the logistic regression\n\n')
         optimizer.reload_parameters('params_pretrain.txt')
         optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr)
-
+    if finetune_choice == 4:
+        print ('\n\n\tFinetune with PNIST07\n\n')
+        optimizer.reload_parameters('params_pretrain.txt')
+        optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=2,decrease=decrease_lr,dataset_test2=datasets.nist_P07())
 
     if finetune_choice==-1:
         print('\nSERIE OF 4 DIFFERENT FINETUNINGS')
@@ -169,6 +172,11 @@
         optimizer.reload_parameters('params_pretrain.txt')
         optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr)
         channel.save()
+        print ('\n\n\tFinetune with PNIST07\n\n')
+        sys.stdout.flush()
+        optimizer.reload_parameters('params_pretrain.txt')
+        optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=2,decrease=decrease_lr,dataset_test2=datasets.nist_P07())
+        channel.save()
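
For context, the sketch below is not part of the changeset; it only illustrates how the new finetune_choice == 4 branch is driven. It assumes the ift6266 project is importable, that `optimizer` is an already-pretrained SdA optimizer exposing reload_parameters() and finetune() as in nist_sda.py, and that the wrapper name run_pnist07_finetune and the import path are illustrative assumptions rather than code from the repository.

# Sketch only (not from the changeset): mirrors the calls added above.
import sys
from ift6266 import datasets  # assumed import path for the datasets module

def run_pnist07_finetune(optimizer, max_finetune_epoch_NIST, decrease_lr):
    # Finetune on PNIST07, with NIST-all passed as the test set (ind_test=2)
    # and NIST-P07 reported through the new dataset_test2 argument.
    print('\n\n\tFinetune with PNIST07\n\n')
    sys.stdout.flush()
    optimizer.reload_parameters('params_pretrain.txt')
    optimizer.finetune(datasets.PNIST07(), datasets.nist_all(),
                       max_finetune_epoch_NIST, ind_test=2,
                       decrease=decrease_lr,
                       dataset_test2=datasets.nist_P07())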