comparison deep/stacked_dae/v_sylvain/sgd_optimization.py @ 352:cfb79f9fd1a4

Added a feature to allow a decreasing learning rate during pretraining
author SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date Wed, 21 Apr 2010 14:50:59 -0400
parents 625c0c3fcbdb
children b599886e3655
comparing 351:799ad23a161f with 352:cfb79f9fd1a4
@@ -86,34 +86,44 @@
 
     def train(self):
         self.pretrain(self.dataset)
         self.finetune(self.dataset)
 
-    def pretrain(self,dataset):
+    def pretrain(self,dataset,decrease=0):
         print "STARTING PRETRAINING, time = ", datetime.datetime.now()
         sys.stdout.flush()
 
         un_fichier=int(819200.0/self.hp.minibatch_size) #Number of batches in a P07 file
 
         start_time = time.clock()
+
+        ######## This is hardcoded. The 0.95 parameter is hardcoded and can be changed at will ###
+        # Set the decreasing rate of the learning rate. We want the final learning rate
+        # to be 5% of the original learning rate. The decreasing factor is linear.
+        decreasing = (decrease*self.hp.pretraining_lr)/float(self.hp.pretraining_epochs_per_layer*800000/self.hp.minibatch_size)
+
         ## Pre-train layer-wise
         for i in xrange(self.classifier.n_layers):
             # go through pretraining epochs
+
+            # Reset the learning rate to its original value
+            learning_rate=self.hp.pretraining_lr
             for epoch in xrange(self.hp.pretraining_epochs_per_layer):
                 # go through the training set
                 batch_index=0
                 count=0
                 num_files=0
                 for x,y in dataset.train(self.hp.minibatch_size):
-                    c = self.classifier.pretrain_functions[i](x)
+                    c = self.classifier.pretrain_functions[i](x,learning_rate)
                     count +=1
 
                     self.series["reconstruction_error"].append((epoch, batch_index), c)
                     batch_index+=1
 
-                    #if batch_index % 100 == 0:
-                    #    print "100 batches"
+                    # If we need to decrease the learning rate during pretraining
+                    if decrease != 0:
+                        learning_rate -= decreasing
 
                     # useful when doing tests
                     if self.max_minibatches and batch_index >= self.max_minibatches:
                         break
 
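Read on its own, the decrement computed in the first hunk defines a per-layer linear schedule: the rate starts at pretraining_lr, drops by a fixed amount after every minibatch, and after pretraining_epochs_per_layer passes over the (hardcoded) 800000 examples it has lost the fraction decrease of its initial value. A minimal standalone sketch of that schedule follows, with illustrative numbers rather than the module's actual self.hp values:

# Standalone sketch of the linear decay the patch implements (illustrative
# values only; the real code reads these from self.hp).
pretraining_lr = 0.01                  # initial pretraining learning rate
decrease = 0.95                        # fraction of the rate to shed per layer
minibatch_size = 20
pretraining_epochs_per_layer = 1

n_batches = int(pretraining_epochs_per_layer * 800000 / minibatch_size)  # 40000
decreasing = (decrease * pretraining_lr) / float(n_batches)              # 2.375e-07

learning_rate = pretraining_lr
for batch in range(n_batches):
    # one pretraining update with `learning_rate` would happen here
    if decrease != 0:
        learning_rate -= decreasing

print(learning_rate)   # ~0.0005, i.e. 5% of the original rate

With decrease=0 (the default), decreasing is 0 and the guard skips the subtraction, so existing callers such as train() keep the previous constant-rate behaviour.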
@@ -203,11 +213,11 @@
         total_mb_index = 0
         minibatch_index = 0
         parameters_finetune=[]
 
         if ind_test == 21:
-            learning_rate = self.hp.finetuning_lr / 5.0
+            learning_rate = self.hp.finetuning_lr / 10.0
         else:
             learning_rate = self.hp.finetuning_lr #The initial finetune lr
 
 
         while (epoch < num_finetune) and (not done_looping):
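The changeset itself does not add a caller that passes the new argument (train() above still calls pretrain with the dataset only), so a caller wanting the 5% target mentioned in the comment would presumably invoke it as in the hypothetical lines below; optimizer and dataset are stand-in names, not identifiers from this file:

# Hypothetical call sites (not part of this changeset); `optimizer` stands in
# for an instance of the optimizer class defined in sgd_optimization.py.
optimizer.pretrain(dataset)                 # constant pretraining learning rate (decrease=0)
optimizer.pretrain(dataset, decrease=0.95)  # linear decay toward 5% of the initial rate per layer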