diff deep/stacked_dae/sgd_optimization.py @ 191:3632e6258642

Minor additions to stacked_dae, just printed the time, I think.
author fsavard
date Tue, 02 Mar 2010 14:47:18 -0500
parents d364a130b221
children e656edaedb48
--- a/deep/stacked_dae/sgd_optimization.py	Tue Mar 02 09:52:27 2010 -0500
+++ b/deep/stacked_dae/sgd_optimization.py	Tue Mar 02 14:47:18 2010 -0500
@@ -6,6 +6,7 @@
 import numpy 
 import theano
 import time
+import datetime
 import theano.tensor as T
 import sys
 
@@ -85,7 +86,7 @@
         self.finetune()
 
     def pretrain(self):
-        print "STARTING PRETRAINING"
+        print "STARTING PRETRAINING, time = ", datetime.datetime.now()
         sys.stdout.flush()
 
         start_time = time.clock()  
@@ -101,6 +102,8 @@
                         
                 print 'Pre-training layer %i, epoch %d, cost '%(i,epoch),c
                 sys.stdout.flush()
+
+                self.series_mux.append("params", self.classifier.all_params)
      
         end_time = time.clock()
 
@@ -110,7 +113,7 @@
         sys.stdout.flush()
 
     def finetune(self):
-        print "STARTING FINETUNING"
+        print "STARTING FINETUNING, time = ", datetime.datetime.now()
 
         index   = T.lscalar()    # index to a [mini]batch 
         minibatch_size = self.hp.minibatch_size
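
For reference, a minimal sketch of the timestamped logging pattern this changeset introduces: print the wall-clock time at the start of each training stage and flush stdout so the message appears promptly in redirected logs. This is written in Python 3 syntax (the file itself uses Python 2 print statements), and log_stage is a hypothetical helper, not part of the repository:

    import datetime
    import sys
    import time

    def log_stage(stage_name):
        # Timestamped stage marker, flushed immediately so it shows up
        # in buffered/redirected output (mirrors the changeset's
        # "STARTING PRETRAINING, time = ..." prints).
        print("STARTING %s, time = %s" % (stage_name, datetime.datetime.now()))
        sys.stdout.flush()

    if __name__ == "__main__":
        log_stage("PRETRAINING")
        time.sleep(0.1)   # stand-in for the actual pretraining loop
        log_stage("FINETUNING")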