comparison deep/stacked_dae/sgd_optimization.py @ 191:3632e6258642

Minor additions to stacked_dae; just printing the time, I think.
author fsavard
date Tue, 02 Mar 2010 14:47:18 -0500
parents d364a130b221
children e656edaedb48
comparing 190:70a9df1cd20e with 191:3632e6258642
@@ -4,10 +4,11 @@
 # Generic SdA optimization loop, adapted from the deeplearning.net tutorial
 
 import numpy
 import theano
 import time
+import datetime
 import theano.tensor as T
 import sys
 
 from jobman import DD
 import jobman, jobman.sql
@@ -83,11 +84,11 @@
     def train(self):
         self.pretrain()
         self.finetune()
 
     def pretrain(self):
-        print "STARTING PRETRAINING"
+        print "STARTING PRETRAINING, time = ", datetime.datetime.now()
         sys.stdout.flush()
 
         start_time = time.clock()
         ## Pre-train layer-wise
         for i in xrange(self.classifier.n_layers):
@@ -99,20 +100,22 @@
 
                 self.series_mux.append("reconstruction_error", c)
 
                 print 'Pre-training layer %i, epoch %d, cost '%(i,epoch),c
                 sys.stdout.flush()
+
+                self.series_mux.append("params", self.classifier.all_params)
 
         end_time = time.clock()
 
         print ('Pretraining took %f minutes' %((end_time-start_time)/60.))
         self.hp.update({'pretraining_time': end_time-start_time})
 
         sys.stdout.flush()
 
     def finetune(self):
-        print "STARTING FINETUNING"
+        print "STARTING FINETUNING, time = ", datetime.datetime.now()
 
         index = T.lscalar()    # index to a [mini]batch
         minibatch_size = self.hp.minibatch_size
 
         # create a function to compute the mistakes that are made by the model
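For readers skimming the change, a minimal, self-contained sketch of the logging pattern this changeset introduces: stamp the start of a phase with datetime.datetime.now(), time the layer-wise loop, and push per-epoch values into a series logger. SeriesLogger and fake_layer_cost below are hypothetical stand-ins for the repository's series_mux and dA pretraining functions, and the sketch uses Python 3 idioms (print(), time.perf_counter()) rather than the Python 2 code in the diff.

import datetime
import time


class SeriesLogger:
    """Hypothetical stand-in for the repo's series_mux: collects named series in memory."""

    def __init__(self):
        self.series = {}

    def append(self, name, value):
        self.series.setdefault(name, []).append(value)


def fake_layer_cost(layer, epoch):
    # Placeholder for the reconstruction cost returned by one dA pretraining epoch.
    return 1.0 / (1.0 + layer + epoch)


def pretrain(n_layers=3, epochs_per_layer=2, series_mux=None):
    series_mux = series_mux if series_mux is not None else SeriesLogger()

    # The changeset's first addition: print wall-clock time when the phase starts.
    print("STARTING PRETRAINING, time =", datetime.datetime.now())

    start_time = time.perf_counter()  # time.clock() from the diff was removed in Python 3.8
    for i in range(n_layers):  # pre-train layer-wise
        for epoch in range(epochs_per_layer):
            c = fake_layer_cost(i, epoch)
            series_mux.append("reconstruction_error", c)
            print('Pre-training layer %i, epoch %d, cost ' % (i, epoch), c)

            # The second addition: periodically record the model parameters
            # (here a dummy dict instead of self.classifier.all_params).
            series_mux.append("params", {"layer": i, "epoch": epoch})
    end_time = time.perf_counter()

    print('Pretraining took %f minutes' % ((end_time - start_time) / 60.))
    return series_mux


if __name__ == "__main__":
    pretrain()

Routing both the reconstruction error and the parameter snapshots through the same append(name, value) call keeps the training loop free of storage details, which appears to be the point of the series_mux abstraction used in the diff.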