comparison deep/stacked_dae/v_sylvain/sgd_optimization.py @ 275:7b4507295eba

merge
author Xavier Glorot <glorotxa@iro.umontreal.ca>
date Mon, 22 Mar 2010 10:20:10 -0400
parents a0264184684e
children a8b92a4a708d
comparing 274:44409b6652aa with 275:7b4507295eba
@@ -7,10 +7,11 @@
 import theano
 import time
 import datetime
 import theano.tensor as T
 import sys
+import pickle
 
 from jobman import DD
 import jobman, jobman.sql
 from copy import copy
 
@@ -119,16 +120,24 @@
 
         sys.stdout.flush()
 
         #To be able to load them later for tests on finetune
         self.parameters_pre=[copy(x.value) for x in self.classifier.params]
-
-
-    def finetune(self,dataset,num_finetune):
+        f = open('params_pretrain.txt', 'w')
+        pickle.dump(self.parameters_pre,f)
+        f.close()
+
+
+    def finetune(self,dataset,dataset_test,num_finetune,ind_test):
         print "STARTING FINETUNING, time = ", datetime.datetime.now()
 
         minibatch_size = self.hp.minibatch_size
+        if ind_test == 0:
+            nom_test = "NIST"
+        else:
+            nom_test = "P07"
+
 
         # create a function to compute the mistakes that are made by the model
         # on the validation set, or testing set
         test_model = \
             theano.function(
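With the new signature, finetune() takes a second test set plus an integer flag that only selects the label printed for that set (0 reports it as "NIST", anything else as "P07"). A hypothetical call site, assuming optimizer is an instance of this class and nist_data/p07_data expose the same test(minibatch_size) interface as dataset (these names and values are illustrative, not from the repo):

    # Finetune on NIST while also tracking error on P07; ind_test != 0
    # makes the second test set print under the "P07" label.
    optimizer.finetune(nist_data, p07_data, num_finetune=20, ind_test=1)

Passing the label itself (e.g. a nom_test string argument) instead of an integer flag would avoid the hard-coded 0/1 mapping, at the cost of touching every caller.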
@@ -211,18 +220,30 @@
                         iter = dataset.test(minibatch_size)
                         if self.max_minibatches:
                             iter = itermax(iter, self.max_minibatches)
                         test_losses = [test_model(x,y) for x,y in iter]
                         test_score = numpy.mean(test_losses)
+
+                        #test it on the second test set
+                        iter2 = dataset_test.test(minibatch_size)
+                        if self.max_minibatches:
+                            iter2 = itermax(iter2, self.max_minibatches)
+                        test_losses2 = [test_model(x,y) for x,y in iter2]
+                        test_score2 = numpy.mean(test_losses2)
 
                         self.series["test_error"].\
                             append((epoch, minibatch_index), test_score*100.)
 
                         print((' epoch %i, minibatch %i, test error of best '
                               'model %f %%') %
                                  (epoch, minibatch_index+1,
                                   test_score*100.))
+
+                        print((' epoch %i, minibatch %i, test error on dataset %s of best '
+                              'model %f %%') %
+                                 (epoch, minibatch_index+1,nom_test,
+                                  test_score2*100.))
 
                     sys.stdout.flush()
 
                 # useful when doing tests
                 if self.max_minibatches and minibatch_index >= self.max_minibatches:
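The second-test-set block repeats the first evaluation loop almost line for line. A possible refactoring, sketched under the assumption that test_model, itermax, numpy, and self.max_minibatches behave exactly as in the surrounding code (the helper name _mean_test_error is hypothetical):

    def _mean_test_error(self, dataset, test_model, minibatch_size):
        # Walk the test split, optionally capped at max_minibatches,
        # and return the mean misclassification rate.
        it = dataset.test(minibatch_size)
        if self.max_minibatches:
            it = itermax(it, self.max_minibatches)
        return numpy.mean([test_model(x, y) for x, y in it])

The two blocks above would then collapse to:

    test_score = self._mean_test_error(dataset, test_model, minibatch_size)
    test_score2 = self._mean_test_error(dataset_test, test_model, minibatch_size)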
@@ -241,16 +262,22 @@
                         'num_finetuning_epochs':epoch})
 
         print(('Optimization complete with best validation score of %f %%,'
               'with test performance %f %%') %
                     (best_validation_loss * 100., test_score*100.))
+        print(('The test score on the %s dataset is %f')%(nom_test,test_score2*100.))
+
         print ('The finetuning ran for %f minutes' % ((end_time-start_time)/60.))
 
 
     #Set parameters like they were right after pre-train
     def reload_parameters(self):
 
+        #self.parameters_pre=pickle.load('params_pretrain.txt')
+        f = open('params_pretrain.txt')
+        self.parameters_pre=pickle.load(f)
+        f.close()
         for idx,x in enumerate(self.parameters_pre):
             self.classifier.params[idx].value=copy(x)
 
 
 
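The commented-out line kept in reload_parameters() records a real pitfall: pickle.load() expects an open file object, not a filename string. Two further fragile points in this pair of changes are the text-mode opens ('w' in the pretrain save, the default read mode here) and the lack of cleanup if dump/load raises. A defensive sketch of the same save/reload round trip, assuming only the pickle module and the existing attributes (the method names are hypothetical):

    def save_pretrain_parameters(self, path='params_pretrain.txt'):
        # 'wb': pickle streams are byte-oriented; text mode can corrupt
        # them on some platforms or with binary pickle protocols.
        f = open(path, 'wb')
        try:
            pickle.dump(self.parameters_pre, f)
        finally:
            f.close()

    def load_pretrain_parameters(self, path='params_pretrain.txt'):
        # 'rb' to match the binary write; try/finally guarantees the
        # handle is closed even if unpickling fails.
        f = open(path, 'rb')
        try:
            self.parameters_pre = pickle.load(f)
        finally:
            f.close()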