comparison deep/stacked_dae/v2/sgd_optimization.py @ 228:851e7ad4a143

Fixed an error in the modified cost formula in stacked_dae, and removed timers from sgd_optimization
author fsavard
date Fri, 12 Mar 2010 10:47:36 -0500
parents acae439d6572
children 02eb98d051fe
--- deep/stacked_dae/v2/sgd_optimization.py	227:acae439d6572
+++ deep/stacked_dae/v2/sgd_optimization.py	228:851e7ad4a143
@@ -102,28 +102,18 @@
 
     def pretrain(self):
         print "STARTING PRETRAINING, time = ", datetime.datetime.now()
         sys.stdout.flush()
 
-        time_acc_func = 0.0
-        time_acc_total = 0.0
-
         start_time = time.clock()
         ## Pre-train layer-wise
         for i in xrange(self.classifier.n_layers):
             # go through pretraining epochs
             for epoch in xrange(self.hp.pretraining_epochs_per_layer):
                 # go through the training set
                 for batch_index in xrange(self.n_train_batches):
-                    t1 = time.clock()
                     c = self.classifier.pretrain_functions[i](batch_index)
-                    t2 = time.clock()
-
-                    time_acc_func += t2 - t1
-
-                    if batch_index % 500 == 0:
-                        print "acc / total", time_acc_func / (t2 - start_time), time_acc_func
 
                     self.series["reconstruction_error"].append((epoch, batch_index), c)
 
                 print 'Pre-training layer %i, epoch %d, cost '%(i,epoch),c
                 sys.stdout.flush()
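
For reference, below is a minimal, self-contained sketch of the layer-wise pretraining loop as it stands after this changeset, with the timer accumulators removed. The names pretrain_functions, epochs_per_layer, n_train_batches and log_error, and the toy stand-ins in the usage block, are assumptions made for illustration; they mirror self.classifier.pretrain_functions, self.hp.pretraining_epochs_per_layer, self.n_train_batches and self.series["reconstruction_error"] from the diff, and this is not the repository's actual code.

import sys
import time
import datetime

def pretrain(pretrain_functions, epochs_per_layer, n_train_batches, log_error):
    # Layer-wise pretraining: each entry of pretrain_functions performs one
    # SGD step on one layer's denoising autoencoder and returns its cost.
    print("STARTING PRETRAINING, time = %s" % datetime.datetime.now())
    sys.stdout.flush()

    start_time = time.time()
    for i, pretrain_layer in enumerate(pretrain_functions):
        for epoch in range(epochs_per_layer):
            for batch_index in range(n_train_batches):
                c = pretrain_layer(batch_index)
                # stands in for self.series["reconstruction_error"].append(...)
                log_error(epoch, batch_index, c)
            print('Pre-training layer %i, epoch %d, cost %f' % (i, epoch, c))
            sys.stdout.flush()
    print('Pretraining took %.1f s' % (time.time() - start_time))

# Toy usage with stand-in cost functions (no Theano involved):
if __name__ == "__main__":
    fake_layers = [lambda b, layer=l: 1.0 / (b + layer + 1) for l in range(2)]
    pretrain(fake_layers, epochs_per_layer=2, n_train_batches=5,
             log_error=lambda e, b, c: None)

After the change, the per-batch body is reduced to the pretraining call plus one logging append; presumably that was the point of dropping the per-batch time.clock() calls and the periodic "acc / total" console output.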