changeset 759:61a3608d5767

Merged
author Olivier Delalleau <delallea@iro>
date Tue, 02 Jun 2009 22:26:29 -0400
parents c60ad32e1f40 (diff) 8447bc9bb2d4 (current diff)
children 60394c460390
files pylearn/algorithms/sandbox/DAA_inputs_groups.py
diffstat 1 files changed, 13 insertions(+), 12 deletions(-)
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Tue Jun 02 21:15:41 2009 -0400
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Tue Jun 02 22:26:29 2009 -0400
@@ -195,13 +195,7 @@
         container.hidden = self.hid_fn(container.hidden_activation)
         self.define_propdown(container, idx_list , auxinput)
         container.rec = self.hid_fn(container.rec_activation)
-        if (self.ignore_missing is not None and self.input is not None and not
-                self.reconstruct_missing):
-            # Apply mask to gradient to ensure we do not backpropagate on the
-            # cost computed on missing inputs (that were replaced with zeros).
-            container.rec = mask_gradient(container.rec,
-                    self.input_missing_mask)
-        
+       
     def define_propup(self, container, input, idx_list, auxinput):
         if self.input is not None:
             container.hidden_activation = self.filter_up(input, self.wenc, self.benc)
@@ -226,7 +220,14 @@
                 container.rec_activation = rec_activation1
             else:
                 container.rec_activation = rec_activation2
-        
+
+        if (self.ignore_missing is not None and self.input is not None and not
+                self.reconstruct_missing):
+            # Apply mask to gradient to ensure we do not backpropagate on the
+            # cost computed on missing inputs (that have been imputed).
+            container.rec_activation = mask_gradient(container.rec_activation,
+                    self.input_missing_mask)
+  
     def filter_up(self, vis, w, b=None):
         out = T.dot(vis, w)
         return out + b if b else out
@@ -315,11 +316,11 @@
             mask = self.random.binomial(T.shape(self.input), 1, 1 - self.noise_level)
         elif self.corruption_pattern == 'by_pair':
             shape = T.shape(self.input)
-            scale = numpy.ones(2)
-            scale[1] = 2
-            shape = shape / scale
+            # Do not ask me why, but just doing "/ 2" does not work (there is
+            # a bug in the optimizer).
+            shape = T.stack(shape[0], (shape[1] * 2) / 4)
             mask = self.random.binomial(shape, 1, 1 - self.noise_level)
-            mask = T.hstack((mask, mask))
+            mask = T.horizontal_stack(mask, mask)
         else:
             raise ValueError('Unknown value for corruption_pattern: %s'
                     % self.corruption_pattern)
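
The first two hunks relocate the gradient masking: instead of masking the reconstruction container.rec after the nonlinearity, the mask is now applied to container.rec_activation at the end of define_propdown. Either way, the intent is the same: the forward reconstruction keeps its value, but no gradient is backpropagated through entries that correspond to missing (imputed) inputs. The snippet below is only a sketch of that idea, not pylearn's mask_gradient implementation; it relies on theano.gradient.zero_grad, a helper from later Theano releases, and the names x, missing_mask, rec_activation and input_missing_mask are placeholders.

import theano
import theano.tensor as T

def mask_gradient_sketch(x, missing_mask):
    # missing_mask holds 1 where the input was missing/imputed, 0 elsewhere.
    # Forward value: x unchanged, since keep + missing_mask == 1 elementwise.
    # Gradient: flows only through the kept entries; the zero_grad term
    # contributes the same value forward but a zero derivative.
    keep = 1 - missing_mask
    return x * keep + theano.gradient.zero_grad(x * missing_mask)

# Usage sketch (hypothetical names): the masked activation would then be
# passed through hid_fn, as define_propdown does in the patched code.
# rec_activation = mask_gradient_sketch(rec_activation, input_missing_mask)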
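
The last hunk changes how the 'by_pair' corruption mask is built: the half-width shape is constructed symbolically with T.stack (the awkward (shape[1] * 2) / 4 is the committed workaround for the optimizer bug mentioned in the comment; shape[1] / 2 is the intent), and the two halves are joined with T.horizontal_stack rather than T.hstack, so both halves of the input share one keep/drop decision per pair. A self-contained sketch of the same pattern, where x and noise_level are placeholders for self.input and self.noise_level:

import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

rng = RandomStreams(seed=2009)
x = T.matrix('x')
noise_level = 0.25

shape = T.shape(x)
# One Bernoulli draw per pair of columns (half the input width),
# mirroring the shape workaround from the committed code.
half_shape = T.stack(shape[0], (shape[1] * 2) / 4)
half_mask = rng.binomial(half_shape, 1, 1 - noise_level)
# Duplicate the half-width mask so the two halves of x are corrupted
# in lockstep, as in the patched code.
mask = T.horizontal_stack(half_mask, half_mask)
corrupted = T.cast(mask, x.dtype) * x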