# HG changeset patch
# User SylvainPL
# Date 1268923942 14400
# Node ID a0264184684e20dc0f479040a22639b9eda2c4e7
# Parent 716c99f4eb3a1c5eee88d1ebc21bfce57bcec038
ajout de fonctionnalitees pour deux testsets

diff -r 716c99f4eb3a -r a0264184684e deep/stacked_dae/v_sylvain/sgd_optimization.py
--- a/deep/stacked_dae/v_sylvain/sgd_optimization.py Wed Mar 17 16:41:51 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/sgd_optimization.py Thu Mar 18 10:52:22 2010 -0400
@@ -9,6 +9,7 @@
 import datetime
 import theano.tensor as T
 import sys
+import pickle
 
 from jobman import DD
 import jobman, jobman.sql
@@ -121,12 +122,20 @@
 
         #To be able to load them later for tests on finetune
         self.parameters_pre=[copy(x.value) for x in self.classifier.params]
+        f = open('params_pretrain.txt', 'w')
+        pickle.dump(self.parameters_pre,f)
+        f.close()
 
-    def finetune(self,dataset,num_finetune):
+    def finetune(self,dataset,dataset_test,num_finetune,ind_test):
         print "STARTING FINETUNING, time = ", datetime.datetime.now()
 
         minibatch_size = self.hp.minibatch_size
+        if ind_test == 0:
+            nom_test = "NIST"
+        else:
+            nom_test = "P07"
+
 
 
         # create a function to compute the mistakes that are made by the model
         # on the validation set, or testing set
@@ -213,6 +222,13 @@
                 iter = itermax(iter, self.max_minibatches)
             test_losses = [test_model(x,y) for x,y in iter]
             test_score = numpy.mean(test_losses)
+
+            #test it on the second test set
+            iter2 = dataset_test.test(minibatch_size)
+            if self.max_minibatches:
+                iter2 = itermax(iter2, self.max_minibatches)
+            test_losses2 = [test_model(x,y) for x,y in iter2]
+            test_score2 = numpy.mean(test_losses2)
 
             self.series["test_error"].\
                 append((epoch, minibatch_index), test_score*100.)
@@ -221,6 +237,11 @@
                        'model %f %%') %
                          (epoch, minibatch_index+1,
                           test_score*100.))
+
+                print((' epoch %i, minibatch %i, test error on dataset %s of best '
+                       'model %f %%') %
+                         (epoch, minibatch_index+1,nom_test,
+                          test_score2*100.))
 
                 sys.stdout.flush()
 
@@ -243,12 +264,18 @@
         print(('Optimization complete with best validation score of %f %%,'
                'with test performance %f %%') %
                      (best_validation_loss * 100., test_score*100.))
+        print(('The test score on the %s dataset is %f')%(nom_test,test_score2*100.))
+
         print ('The finetuning ran for %f minutes' % ((end_time-start_time)/60.))
 
 
     #Set parameters like they where right after pre-train
     def reload_parameters(self):
-
+
+        #self.parameters_pre=pickle.load('params_pretrain.txt')
+        f = open('params_pretrain.txt')
+        self.parameters_pre=pickle.load(f)
+        f.close()
 
         for idx,x in enumerate(self.parameters_pre):
             self.classifier.params[idx].value=copy(x)