comparison deep/stacked_dae/sgd_optimization.py @ 208:acb942530923

Completely rewrote my series module, now based on HDF5 and PyTables (kept in a separate directory called 'tables_series' so that running code remains backward compatible). Minor (inconsequential) changes to stacked_dae.
author fsavard
date Fri, 05 Mar 2010 18:07:20 -0500
parents e656edaedb48
children 7b4507295eba
comparison: 205:10a801240bfc vs. 208:acb942530923
@@ -84,28 +84,40 @@
             rng = self.rng,\
             pretrain_lr = self.hp.pretraining_lr, \
             finetune_lr = self.hp.finetuning_lr,\
             input_divider = self.input_divider )
 
+        #theano.printing.pydotprint(self.classifier.pretrain_functions[0], "function.graph")
+
         sys.stdout.flush()
 
     def train(self):
         self.pretrain()
         self.finetune()
 
     def pretrain(self):
         print "STARTING PRETRAINING, time = ", datetime.datetime.now()
         sys.stdout.flush()
+
+        #time_acc_func = 0.0
+        #time_acc_total = 0.0
 
         start_time = time.clock()
         ## Pre-train layer-wise
         for i in xrange(self.classifier.n_layers):
             # go through pretraining epochs
             for epoch in xrange(self.hp.pretraining_epochs_per_layer):
                 # go through the training set
                 for batch_index in xrange(self.n_train_batches):
+                    #t1 = time.clock()
                     c = self.classifier.pretrain_functions[i](batch_index)
+                    #t2 = time.clock()
+
+                    #time_acc_func += t2 - t1
+
+                    #if batch_index % 500 == 0:
+                    #    print "acc / total", time_acc_func / (t2 - start_time), time_acc_func
 
                     self.series_mux.append("reconstruction_error", c)
 
                 print 'Pre-training layer %i, epoch %d, cost '%(i,epoch),c
                 sys.stdout.flush()
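The rewritten 'tables_series' module is not part of this hunk, so for context here is a minimal sketch of how a series multiplexer like the self.series_mux.append("reconstruction_error", c) call above could be backed by an extensible HDF5 array. It assumes PyTables 3-style names (open_file, create_earray); the SeriesMux class and its methods are hypothetical stand-ins, not the actual module from this changeset.

# Hypothetical sketch only -- not the 'tables_series' code from this changeset.
import numpy
import tables

class SeriesMux(object):
    """Accumulate named scalar series in one HDF5 file via PyTables."""

    def __init__(self, filename):
        self.h5file = tables.open_file(filename, mode="w")
        self.arrays = {}

    def append(self, name, value):
        # Lazily create one extensible float32 array per series name.
        if name not in self.arrays:
            self.arrays[name] = self.h5file.create_earray(
                self.h5file.root, name,
                atom=tables.Float32Atom(), shape=(0,))
        # Append the new value along the extensible dimension and flush to disk.
        self.arrays[name].append(numpy.array([value], dtype="float32"))
        self.h5file.flush()

    def close(self):
        self.h5file.close()

# Usage, mirroring the pretraining loop in the diff:
#   mux = SeriesMux("series.h5")
#   mux.append("reconstruction_error", c)
#   mux.close()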