ift6266: comparison deep/stacked_dae/v_sylvain/nist_sda_retrieve.py @ 306:a78dbbc61f37
Better runtime flexibility: a hard-coded parameter is now handled more cleanly
author   | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date     | Wed, 31 Mar 2010 21:02:27 -0400
parents  | f9b93ae45723
children | a76bae0f2388
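
For readers skimming the diff below: the revision replaces hard-coded absolute parameter-file paths with PATH+'params_pretrain.txt', where PATH is selected from the job's state. A minimal sketch of the new pattern, with hypothetical placeholder values for PATH_NIST and PATH_P07 (the real values are defined elsewhere in nist_sda_retrieve.py):

    # Sketch only: PATH_NIST / PATH_P07 are placeholders, not the script's real values.
    PATH_NIST = '/data/sda_pretrain/nist/'
    PATH_P07  = '/data/sda_pretrain/p07/'

    state = {'pretrain_choice': 0}   # 0 = pre-trained on NIST, 1 = pre-trained on P07

    # Select the directory holding the saved pre-training parameters.
    if state['pretrain_choice'] == 0:
        PATH = PATH_NIST
    elif state['pretrain_choice'] == 1:
        PATH = PATH_P07

    # Every finetuning branch then loads the parameters relative to PATH:
    print(PATH + 'params_pretrain.txt')   # -> /data/sda_pretrain/nist/params_pretrain.txt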
comparison
305:fe5d428c2acc (parent) | 306:a78dbbc61f37 (this changeset)
82 ## "You have to correct the code (and be patient, P07 is huge !!)\n"+ | 82 ## "You have to correct the code (and be patient, P07 is huge !!)\n"+ |
83 ## "or reduce the number of pretraining epoch to run the code (better idea).\n") | 83 ## "or reduce the number of pretraining epoch to run the code (better idea).\n") |
84 ## print('\n\tpretraining with P07') | 84 ## print('\n\tpretraining with P07') |
85 ## optimizer.pretrain(datasets.nist_P07(min_file=0,max_file=nb_file)) | 85 ## optimizer.pretrain(datasets.nist_P07(min_file=0,max_file=nb_file)) |
86 print ('Retrieve pre-train done earlier') | 86 print ('Retrieve pre-train done earlier') |
87 | |
88 if state['pretrain_choice'] == 0: | |
89 PATH=PATH_NIST | |
90 elif state['pretrain_choice'] == 1: | |
91 PATH=PATH_P07 | |
87 | 92 |
88 sys.stdout.flush() | 93 sys.stdout.flush() |
94 channel.save() | |
89 | 95 |
90 #Set some of the parameters used for the finetuning | 96 #Set some of the parameters used for the finetuning |
91 if state.has_key('finetune_set'): | 97 if state.has_key('finetune_set'): |
92 finetune_choice=state['finetune_set'] | 98 finetune_choice=state['finetune_set'] |
93 else: | 99 else: |
105 | 111 |
106 #Decide how the finetune is done | 112 #Decide how the finetune is done |
107 | 113 |
108 if finetune_choice == 0: | 114 if finetune_choice == 0: |
109 print('\n\n\tfinetune with NIST\n\n') | 115 print('\n\n\tfinetune with NIST\n\n') |
110 optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt') | 116 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
111 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1) | 117 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1) |
112 channel.save() | 118 channel.save() |
113 if finetune_choice == 1: | 119 if finetune_choice == 1: |
114 print('\n\n\tfinetune with P07\n\n') | 120 print('\n\n\tfinetune with P07\n\n') |
115 optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt') | 121 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
116 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0) | 122 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0) |
117 channel.save() | 123 channel.save() |
118 if finetune_choice == 2: | 124 if finetune_choice == 2: |
119 print('\n\n\tfinetune with NIST followed by P07\n\n') | 125 print('\n\n\tfinetune with NIST followed by P07\n\n') |
120 optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt') | 126 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
121 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=21) | 127 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=21) |
122 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20) | 128 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20) |
123 channel.save() | 129 channel.save() |
124 if finetune_choice == 3: | 130 if finetune_choice == 3: |
125 print('\n\n\tfinetune with NIST only on the logistic regression on top (but validation on P07).\n\ | 131 print('\n\n\tfinetune with NIST only on the logistic regression on top (but validation on P07).\n\ |
126 All hidden units output are input of the logistic regression\n\n') | 132 All hidden units output are input of the logistic regression\n\n') |
127 optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt') | 133 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
128 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1,special=1) | 134 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1,special=1) |
129 | 135 |
130 | 136 |
131 if finetune_choice==-1: | 137 if finetune_choice==-1: |
132 print('\nSERIE OF 3 DIFFERENT FINETUNINGS') | 138 print('\nSERIE OF 3 DIFFERENT FINETUNINGS') |
133 print('\n\n\tfinetune with NIST\n\n') | 139 print('\n\n\tfinetune with NIST\n\n') |
134 sys.stdout.flush() | 140 sys.stdout.flush() |
135 optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt') | 141 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
136 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1) | 142 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1) |
137 channel.save() | 143 channel.save() |
138 print('\n\n\tfinetune with P07\n\n') | 144 print('\n\n\tfinetune with P07\n\n') |
139 sys.stdout.flush() | 145 sys.stdout.flush() |
140 optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt') | 146 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
141 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0) | 147 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0) |
142 channel.save() | 148 channel.save() |
143 print('\n\n\tfinetune with NIST (done earlier) followed by P07 (written here)\n\n') | 149 print('\n\n\tfinetune with NIST (done earlier) followed by P07 (written here)\n\n') |
144 sys.stdout.flush() | 150 sys.stdout.flush() |
145 optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_finetune_NIST.txt') | 151 optimizer.reload_parameters('params_finetune_NIST.txt') |
146 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20) | 152 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20) |
147 channel.save() | 153 channel.save() |
148 print('\n\n\tfinetune with NIST only on the logistic regression on top.\n\ | 154 print('\n\n\tfinetune with NIST only on the logistic regression on top.\n\ |
149 All hidden units output are input of the logistic regression\n\n') | 155 All hidden units output are input of the logistic regression\n\n') |
150 sys.stdout.flush() | 156 sys.stdout.flush() |
151 optimizer.reload_parameters('/u/pannetis/IFT6266/ift6266/deep/stacked_dae/v_sylvain/ift6266h10_db/pannetis_finetuningSDA/1/params_pretrain.txt') | 157 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
152 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1,special=1) | 158 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1,special=1) |
153 channel.save() | 159 channel.save() |
154 | 160 |
155 channel.save() | 161 channel.save() |
156 | 162 |
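
A side note on the new PATH+'params_pretrain.txt' concatenation (an observation, not part of the changeset): it only resolves correctly if PATH_NIST and PATH_P07 end with a path separator. A hedged sketch of a slash-agnostic alternative using only the standard library, with hypothetical directory values:

    import os

    # Hypothetical directory values; the script's real ones are defined elsewhere.
    for base in ('/data/sda_pretrain/nist', '/data/sda_pretrain/p07/'):
        # os.path.join inserts the separator only when missing, so both spellings work.
        print(os.path.join(base, 'params_pretrain.txt'))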