Mercurial > ift6266
comparison deep/stacked_dae/v_sylvain/nist_sda_retrieve.py @ 313:e301a2f32665
Avoir exactement le même jeu de données pour pre-train et finetune
author | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca> |
---|---|
date | Thu, 01 Apr 2010 14:25:55 -0400 |
parents | 8b31280129a9 |
children | 60e82846a10d |
comparison
equal
deleted
inserted
replaced
312:bd6085d77706 | 313:e301a2f32665 |
---|---|
116 #Decide how the finetune is done | 116 #Decide how the finetune is done |
117 | 117 |
118 if finetune_choice == 0: | 118 if finetune_choice == 0: |
119 print('\n\n\tfinetune with NIST\n\n') | 119 print('\n\n\tfinetune with NIST\n\n') |
120 optimizer.reload_parameters(PATH+'params_pretrain.txt') | 120 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
121 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1) | 121 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1) |
122 channel.save() | 122 channel.save() |
123 if finetune_choice == 1: | 123 if finetune_choice == 1: |
124 print('\n\n\tfinetune with P07\n\n') | 124 print('\n\n\tfinetune with P07\n\n') |
125 optimizer.reload_parameters(PATH+'params_pretrain.txt') | 125 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
126 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0) | 126 optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0) |
127 channel.save() | 127 channel.save() |
128 if finetune_choice == 2: | 128 if finetune_choice == 2: |
129 print('\n\n\tfinetune with NIST followed by P07\n\n') | 129 print('\n\n\tfinetune with NIST followed by P07\n\n') |
130 optimizer.reload_parameters(PATH+'params_pretrain.txt') | 130 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
131 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=21) | 131 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=21) |
132 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20) | 132 optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20) |
133 channel.save() | 133 channel.save() |
134 if finetune_choice == 3: | 134 if finetune_choice == 3: |
135 print('\n\n\tfinetune with NIST only on the logistic regression on top (but validation on P07).\n\ | 135 print('\n\n\tfinetune with NIST only on the logistic regression on top (but validation on P07).\n\ |
136 All hidden units output are input of the logistic regression\n\n') | 136 All hidden units output are input of the logistic regression\n\n') |
137 optimizer.reload_parameters(PATH+'params_pretrain.txt') | 137 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
138 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1,special=1) | 138 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1) |
139 | 139 |
140 | 140 |
141 if finetune_choice==-1: | 141 if finetune_choice==-1: |
142 print('\nSERIE OF 3 DIFFERENT FINETUNINGS') | 142 print('\nSERIE OF 3 DIFFERENT FINETUNINGS') |
143 print('\n\n\tfinetune with NIST\n\n') | 143 print('\n\n\tfinetune with NIST\n\n') |
144 sys.stdout.flush() | 144 sys.stdout.flush() |
145 optimizer.reload_parameters(PATH+'params_pretrain.txt') | 145 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
146 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1) | 146 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1) |
147 channel.save() | 147 channel.save() |
148 print('\n\n\tfinetune with P07\n\n') | 148 print('\n\n\tfinetune with P07\n\n') |
149 sys.stdout.flush() | 149 sys.stdout.flush() |
150 optimizer.reload_parameters(PATH+'params_pretrain.txt') | 150 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
151 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0) | 151 optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0) |
152 channel.save() | 152 channel.save() |
153 print('\n\n\tfinetune with NIST (done earlier) followed by P07 (written here)\n\n') | 153 print('\n\n\tfinetune with NIST (done earlier) followed by P07 (written here)\n\n') |
154 sys.stdout.flush() | 154 sys.stdout.flush() |
155 optimizer.reload_parameters('params_finetune_NIST.txt') | 155 optimizer.reload_parameters('params_finetune_NIST.txt') |
156 optimizer.finetune(datasets.nist_P07(min_file=nb_file),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20) | 156 optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20) |
157 channel.save() | 157 channel.save() |
158 print('\n\n\tfinetune with NIST only on the logistic regression on top.\n\ | 158 print('\n\n\tfinetune with NIST only on the logistic regression on top.\n\ |
159 All hidden units output are input of the logistic regression\n\n') | 159 All hidden units output are input of the logistic regression\n\n') |
160 sys.stdout.flush() | 160 sys.stdout.flush() |
161 optimizer.reload_parameters(PATH+'params_pretrain.txt') | 161 optimizer.reload_parameters(PATH+'params_pretrain.txt') |
162 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(min_file=nb_file),max_finetune_epoch_NIST,ind_test=1,special=1) | 162 optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1) |
163 channel.save() | 163 channel.save() |
164 | 164 |
165 channel.save() | 165 channel.save() |
166 | 166 |
167 return channel.COMPLETE | 167 return channel.COMPLETE |