changeset 757:2a30f22ef7ff
Fixed bug introduced in previous change meant to improve numerical stability
author     Olivier Delalleau <delallea@iro>
date       Tue, 02 Jun 2009 14:40:43 -0400
parents    84d22b7d835a
children   c60ad32e1f40
files      pylearn/algorithms/sandbox/DAA_inputs_groups.py
diffstat   1 files changed, 9 insertions(+), 8 deletions(-)
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Tue Jun 02 13:51:22 2009 -0400
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Tue Jun 02 14:40:43 2009 -0400
@@ -195,13 +195,7 @@
         container.hidden = self.hid_fn(container.hidden_activation)
         self.define_propdown(container, idx_list, auxinput)
         container.rec = self.hid_fn(container.rec_activation)
-        if (self.ignore_missing is not None and self.input is not None and not
-                self.reconstruct_missing):
-            # Apply mask to gradient to ensure we do not backpropagate on the
-            # cost computed on missing inputs (that were replaced with zeros).
-            container.rec = mask_gradient(container.rec,
-                    self.input_missing_mask)
-
+
     def define_propup(self, container, input, idx_list, auxinput):
         if self.input is not None:
             container.hidden_activation = self.filter_up(input, self.wenc, self.benc)
@@ -226,7 +220,14 @@
             container.rec_activation = rec_activation1
         else:
             container.rec_activation = rec_activation2
-
+
+        if (self.ignore_missing is not None and self.input is not None and not
+                self.reconstruct_missing):
+            # Apply mask to gradient to ensure we do not backpropagate on the
+            # cost computed on missing inputs (that have been imputed).
+            container.rec_activation = mask_gradient(container.rec_activation,
+                    self.input_missing_mask)
+
     def filter_up(self, vis, w, b=None):
         out = T.dot(vis, w)
         return out + b if b else out
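The fix moves the gradient masking from the post-nonlinearity reconstruction (container.rec) to the pre-nonlinearity container.rec_activation, applied as soon as rec_activation is computed in define_propdown. Presumably the earlier numerical-stability change made the reconstruction cost be computed from rec_activation directly, so masking only rec no longer blocked gradients flowing through the missing entries. For intuition, mask_gradient (defined elsewhere in pylearn) passes values through unchanged on the forward pass while zeroing the gradient at masked positions. Below is a minimal, self-contained sketch of that behaviour in JAX; the function body, and the convention that the mask is 1 where an input is missing, are illustrative assumptions, not pylearn's actual Theano implementation:

    import jax
    import jax.numpy as jnp

    def mask_gradient(x, missing_mask):
        # Forward pass: returns x unchanged.
        # Backward pass: the gradient is zeroed wherever missing_mask == 1,
        # so a cost computed on imputed (missing) inputs cannot backpropagate.
        observed = 1.0 - missing_mask
        return observed * x + missing_mask * jax.lax.stop_gradient(x)

    # The gradient of a cost built on the masked value only flows through
    # observed entries:
    rec_activation = jnp.array([0.2, 0.7, 0.5])
    mask = jnp.array([0.0, 1.0, 0.0])  # second input was missing
    cost = lambda a: jnp.sum(mask_gradient(a, mask) ** 2)
    print(jax.grad(cost)(rec_activation))  # [0.4, 0.0, 1.0]

Applying the mask to rec_activation rather than rec also covers both cases at once: since container.rec = self.hid_fn(container.rec_activation) is downstream of the masked node, a cost computed on either quantity has its gradient blocked at the missing positions.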