changeset 209:d982dfa583df

Merge
author fsavard
date Fri, 05 Mar 2010 18:08:34 -0500
parents acb942530923 (current diff) 43af74a348ac (diff)
children dc0d77c8a878
files
diffstat 1 files changed, 9 insertions(+), 4 deletions(-)
--- a/deep/autoencoder/DA_training.py	Fri Mar 05 18:07:20 2010 -0500
+++ b/deep/autoencoder/DA_training.py	Fri Mar 05 18:08:34 2010 -0500
@@ -93,7 +93,12 @@
         theano_rng = RandomStreams()
         # create a numpy random generator
         numpy_rng = numpy.random.RandomState()
-        
+
+        # print the parameters of the DA
+        if True:
+            print 'input size = %d' % n_visible
+            print 'hidden size = %d' % n_hidden
+            print 'complexity = %2.2f' % complexity
          
         # initial values for weights and biases
         # note : W' was written as `W_prime` and b' as `b_prime`
@@ -250,7 +255,7 @@
 
     # construct the denoising autoencoder class
     n_ins = 32*32
-    encoder = dA(n_ins, n_code_layer, input = x.reshape((batch_size,n_ins)))
+    encoder = dA(n_ins, n_code_layer, complexity, input = x.reshape((batch_size,n_ins)))
 
     # Train autoencoder
     
@@ -363,7 +368,7 @@
                               test_score))
 
         if patience <= iter :
-                print('iter (%i) is superior than patience(%i). break', iter, patience)
+                print('iter (%i) exceeds patience (%i). break' % (iter, patience))
                 break
 
         
@@ -451,7 +456,7 @@
 
     # construct the denoising autoencoder class
     n_ins = 28*28
-    encoder = dA(n_ins, n_code_layer, input = x.reshape((batch_size,n_ins)))
+    encoder = dA(n_ins, n_code_layer, complexity, input = x.reshape((batch_size,n_ins)))
 
     # Train autoencoder
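
Note: both call sites now pass complexity as a third positional argument, so the dA constructor must accept it. The dA class definition is not part of this changeset, so the following is only a minimal sketch of the assumed signature; the parameter names echo the printout added in the first hunk, and the body shown here is hypothetical.

    # Hypothetical sketch -- the real dA.__init__ lives elsewhere in
    # DA_training.py and is not shown in this changeset.
    class dA(object):
        def __init__(self, n_visible, n_hidden, complexity, input=None):
            # complexity: assumed to be the corruption level used by the
            # denoising step, matching the 'complexity = %2.2f' printout
            # added in the first hunk
            self.n_visible = n_visible
            self.n_hidden = n_hidden
            self.complexity = complexity
            self.x = input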