diff deep/stacked_dae/sgd_optimization.py @ 208:acb942530923
Completely rewrote my series module; it is now based on HDF5 and PyTables (in a separate directory called 'tables_series' so that running code stays backward compatible). Minor (inconsequential) changes to stacked_dae.
author:   fsavard
date:     Fri, 05 Mar 2010 18:07:20 -0500
parents:  e656edaedb48
children: 7b4507295eba
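
The commit message describes the new series module as HDF5- and PyTables-based, but the 'tables_series' code itself is not part of this diff. As a rough illustration of the pattern (an extendable HDF5 array that a scalar is appended to, one per call), here is a hypothetical sketch; the class and method names are invented for illustration, using the PyTables 2.x API that was current at the time:

```python
# Hypothetical sketch of an HDF5/PyTables-backed series (the real
# 'tables_series' module is not shown in this diff). ErrorSeries and its
# methods are illustrative names, not the actual API.
import tables

class ErrorSeries(object):
    def __init__(self, filename, name):
        self.h5file = tables.openFile(filename, mode="w")
        # Extendable 1-D array of float64; grows by one element per append().
        self.array = self.h5file.createEArray(self.h5file.root, name,
                                              tables.Float64Atom(), shape=(0,))

    def append(self, value):
        self.array.append([value])
        self.h5file.flush()

    def close(self):
        self.h5file.close()
```

With something along these lines, the `self.series_mux.append("reconstruction_error", c)` call in the diff below would dispatch each reconstruction error to one such series.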
```diff
--- a/deep/stacked_dae/sgd_optimization.py	Thu Mar 04 08:21:43 2010 -0500
+++ b/deep/stacked_dae/sgd_optimization.py	Fri Mar 05 18:07:20 2010 -0500
@@ -86,6 +86,8 @@
                        finetune_lr = self.hp.finetuning_lr,\
                        input_divider = self.input_divider )
 
+        #theano.printing.pydotprint(self.classifier.pretrain_functions[0], "function.graph")
+
         sys.stdout.flush()
 
     def train(self):
@@ -96,6 +98,9 @@
         print "STARTING PRETRAINING, time = ", datetime.datetime.now()
         sys.stdout.flush()
 
+        #time_acc_func = 0.0
+        #time_acc_total = 0.0
+
         start_time = time.clock()
         ## Pre-train layer-wise
         for i in xrange(self.classifier.n_layers):
@@ -103,7 +108,14 @@
             for epoch in xrange(self.hp.pretraining_epochs_per_layer):
                 # go through the training set
                 for batch_index in xrange(self.n_train_batches):
+                    #t1 = time.clock()
                     c = self.classifier.pretrain_functions[i](batch_index)
+                    #t2 = time.clock()
+
+                    #time_acc_func += t2 - t1
+
+                    #if batch_index % 500 == 0:
+                    #    print "acc / total", time_acc_func / (t2 - start_time), time_acc_func
                     self.series_mux.append("reconstruction_error", c)
```
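
The first hunk adds a commented-out call to theano.printing.pydotprint, which renders a compiled Theano function's optimized computation graph through pydot/graphviz. A minimal, self-contained sketch of the same call, where the toy sigmoid function stands in for pretrain_functions[0]:

```python
# Minimal sketch: dump the optimized graph of a compiled Theano function,
# as the commented-out pydotprint call would do for pretrain_functions[0].
# Requires the pydot and graphviz packages; the sigmoid is a stand-in.
import theano
import theano.tensor as T

x = T.dmatrix('x')
f = theano.function([x], T.nnet.sigmoid(x))

# Second positional argument is the output file for the rendered graph.
theano.printing.pydotprint(f, "function.graph")
```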
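
The remaining hunks add commented-out profiling scaffolding: accumulate the time spent inside the per-batch pretraining call and periodically print what fraction of the total elapsed time it accounts for. Uncommented and pulled out into a standalone loop, the pattern looks roughly like this (train_fn and n_batches are placeholders for the real pretraining function and batch count):

```python
# Sketch of the profiling pattern from the commented-out lines: time each
# call to train_fn, accumulate it, and report the fraction of total elapsed
# time spent inside the call every 500 batches. Names are placeholders.
import time

def profiled_loop(train_fn, n_batches):
    time_acc_func = 0.0
    start_time = time.clock()
    for batch_index in xrange(n_batches):
        t1 = time.clock()
        c = train_fn(batch_index)      # the call being measured
        t2 = time.clock()
        time_acc_func += t2 - t1
        if batch_index % 500 == 0:
            print "acc / total", time_acc_func / (t2 - start_time), time_acc_func
    return c
```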