diff deep/stacked_dae/v_sylvain/nist_sda_retrieve.py @ 306:a78dbbc61f37
Better execution flexibility; a hard-coded parameter is now handled more cleanly
author | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date | Wed, 31 Mar 2010 21:02:27 -0400
parents | f9b93ae45723
children | a76bae0f2388
--- a/deep/stacked_dae/v_sylvain/nist_sda_retrieve.py	Wed Mar 31 21:00:59 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/nist_sda_retrieve.py	Wed Mar 31 21:02:27 2010 -0400
@@ -84,8 +84,14 @@
 ##        print('\n\tpretraining with P07')
 ##        optimizer.pretrain(datasets.nist_P07(min_file=0,max_file=nb_file))
         print ('Retrieve pre-train done earlier')
+
+        if state['pretrain_choice'] == 0:
+            PATH=PATH_NIST
+        elif state['pretrain_choice'] == 1:
+            PATH=PATH_P07
         sys.stdout.flush()
+        channel.save()
 
         #Set some of the parameters used for the finetuning
         if state.has_key('finetune_set'):
@@ -107,24 +113,24 @@
         if finetune_choice == 0:
             print('\n\n\tfinetune with NIST\n\n')
-            optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt')
+            optimizer.reload_parameters(PATH+'params_pretrain.txt')
             optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1)
             channel.save()
         if finetune_choice == 1:
             print('\n\n\tfinetune with P07\n\n')
-            optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt')
+            optimizer.reload_parameters(PATH+'params_pretrain.txt')
             optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0)
             channel.save()
         if finetune_choice == 2:
             print('\n\n\tfinetune with NIST followed by P07\n\n')
-            optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt')
+            optimizer.reload_parameters(PATH+'params_pretrain.txt')
             optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=21)
             optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20)
             channel.save()
         if finetune_choice == 3:
             print('\n\n\tfinetune with NIST only on the logistic regression on top (but validation on P07).\n\
             All hidden units output are input of the logistic regression\n\n')
-            optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt')
+            optimizer.reload_parameters(PATH+'params_pretrain.txt')
             optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1,special=1)
@@ -132,23 +138,23 @@
             print('\nSERIE OF 3 DIFFERENT FINETUNINGS')
             print('\n\n\tfinetune with NIST\n\n')
             sys.stdout.flush()
-            optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt')
+            optimizer.reload_parameters(PATH+'params_pretrain.txt')
             optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1)
             channel.save()
             print('\n\n\tfinetune with P07\n\n')
             sys.stdout.flush()
-            optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt')
+            optimizer.reload_parameters(PATH+'params_pretrain.txt')
             optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0)
             channel.save()
             print('\n\n\tfinetune with NIST (done earlier) followed by P07 (written here)\n\n')
             sys.stdout.flush()
-            optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_finetune_NIST.txt')
+            optimizer.reload_parameters('params_finetune_NIST.txt')
             optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20)
             channel.save()
             print('\n\n\tfinetune with NIST only on the logistic regression on top.\n\
             All hidden units output are input of the logistic regression\n\n')
             sys.stdout.flush()
-            optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt')
+            optimizer.reload_parameters(PATH+'params_pretrain.txt')
             optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1,special=1)
             channel.save()
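
The change this diff makes is to replace a hard-coded absolute path with a PATH variable selected from state['pretrain_choice']. Below is a minimal standalone sketch of that selection pattern, assuming a plain dict stands in for the jobman state and using hypothetical values for PATH_NIST and PATH_P07, whose real definitions sit outside the hunks shown here:

import os

# Hypothetical stand-ins: the diff references PATH_NIST and PATH_P07, but
# their definitions are not in these hunks, so these values are illustrative.
PATH_NIST = '/u/pannetis/IFT6266/pretrain_NIST/'
PATH_P07 = '/u/pannetis/IFT6266/pretrain_P07/'

def pretrain_path(state):
    # Mirror the if/elif added in the diff: map the integer flag
    # state['pretrain_choice'] to a base directory.
    choices = {0: PATH_NIST, 1: PATH_P07}
    try:
        return choices[state['pretrain_choice']]
    except KeyError:
        raise ValueError('unknown pretrain_choice: %r'
                         % state.get('pretrain_choice'))

# Usage with a dict standing in for the jobman state:
state = {'pretrain_choice': 1}
PATH = pretrain_path(state)
# The diff builds file names as PATH+'params_pretrain.txt', which assumes
# PATH ends with '/'; os.path.join avoids relying on a trailing slash.
params_file = os.path.join(PATH, 'params_pretrain.txt')

Note that one call in the last hunk switches to the relative name 'params_finetune_NIST.txt' rather than PATH+'params_finetune_NIST.txt': that file is written by the NIST finetuning run earlier in the same job, not retrieved from the pre-training directory.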