comparison deep/stacked_dae/v_sylvain/sgd_optimization.py @ 350:625c0c3fcbdb

Improve the efficiency of saving the parameters
author SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date Tue, 20 Apr 2010 16:38:13 -0400
parents a79db7cee035
children cfb79f9fd1a4
comparison of 349:22efb4968054 (old) and 350:625c0c3fcbdb (new)
@@ -7,11 +7,12 @@
 import theano
 import time
 import datetime
 import theano.tensor as T
 import sys
-import pickle
+#import pickle
+import cPickle
 
 from jobman import DD
 import jobman, jobman.sql
 from copy import copy
 
@@ -139,11 +140,11 @@
         sys.stdout.flush()
 
         #To be able to load them later for tests on finetune
         self.parameters_pre=[copy(x.value) for x in self.classifier.params]
         f = open('params_pretrain.txt', 'w')
-        pickle.dump(self.parameters_pre,f)
+        cPickle.dump(self.parameters_pre,f,protocol=-1)
         f.close()
 
 
     def finetune(self,dataset,dataset_test,num_finetune,ind_test,special=0,decrease=0):
 
@@ -293,12 +294,12 @@
                 # useful when doing tests
                 if self.max_minibatches and minibatch_index >= self.max_minibatches:
                     break
 
             if decrease == 1:
                 if (ind_test == 21 & epoch % 100 == 0) | ind_test == 20:
                     learning_rate /= 2 #divide the learning rate by 2 for each new epoch of P07 (or 100 of NIST)
 
             self.series['params'].append((epoch,), self.classifier.all_params)
 
             if done_looping == True: #To exit completly the fine-tuning
                 break #to exit the WHILE loop
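An aside on the unchanged condition above: & and | are bitwise operators in Python and bind tighter than ==, so (ind_test == 21 & epoch % 100 == 0) | ind_test == 20 parses as ((ind_test == (21 & epoch % 100) == 0) | ind_test) == 20. That expression still fires for ind_test == 20, but is never true when ind_test == 21, so the every-100-epochs halving for NIST never happens. A sketch of the test the comment actually describes, using boolean operators:

    # Halve the learning rate every epoch of P07 (ind_test == 20),
    # or every 100th epoch of NIST (ind_test == 21).
    if decrease == 1:
        if (ind_test == 21 and epoch % 100 == 0) or ind_test == 20:
            learning_rate /= 2

The same pitfall affects the elif ind_test == 0 | ind_test == 20: branch in the next hunk, which reduces to ind_test == 20 alone, so its ind_test == 0 case never matches.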
@@ -320,36 +321,36 @@
 
         #Save a copy of the parameters in a file to be able to get them in the future
 
         if special == 1: #To keep a track of the value of the parameters
             f = open('params_finetune_stanford.txt', 'w')
-            pickle.dump(parameters_finetune,f)
+            cPickle.dump(parameters_finetune,f,protocol=-1)
             f.close()
 
         elif ind_test == 0 | ind_test == 20: #To keep a track of the value of the parameters
             f = open('params_finetune_P07.txt', 'w')
-            pickle.dump(parameters_finetune,f)
+            cPickle.dump(parameters_finetune,f,protocol=-1)
             f.close()
 
 
         elif ind_test== 1: #For the run with 2 finetunes. It will be faster.
             f = open('params_finetune_NIST.txt', 'w')
-            pickle.dump(parameters_finetune,f)
+            cPickle.dump(parameters_finetune,f,protocol=-1)
             f.close()
 
         elif ind_test== 21: #To keep a track of the value of the parameters
             f = open('params_finetune_P07_then_NIST.txt', 'w')
-            pickle.dump(parameters_finetune,f)
+            cPickle.dump(parameters_finetune,f,protocol=-1)
             f.close()
 
 
     #Set parameters like they where right after pre-train or finetune
     def reload_parameters(self,which):
 
         #self.parameters_pre=pickle.load('params_pretrain.txt')
         f = open(which)
-        self.parameters_pre=cPickle.load(f)
+        self.parameters_pre=cPickle.load(f)
         f.close()
         for idx,x in enumerate(self.parameters_pre):
             if x.dtype=='float64':
                 self.classifier.params[idx].value=theano._asarray(copy(x),dtype=theano.config.floatX)
             else:
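On reload, the parameters come back from cPickle as numpy arrays, and any float64 array is cast to theano.config.floatX before being written back into the model's shared variables. A small standalone sketch of that cast, where numpy stands in for a reloaded parameter and theano._asarray is the private helper the file already uses:

    import numpy
    import theano

    x = numpy.ones((3, 3), dtype='float64')  # stand-in for a reloaded parameter
    if x.dtype == 'float64':
        # Cast to the configured float width, e.g. float32 when Theano
        # is configured with floatX=float32.
        x = theano._asarray(x, dtype=theano.config.floatX)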