pylearn: changeset 718:88f5b75a4afe
Added new option 'ignore_missing' to deal with missing values in input
| author | Olivier Delalleau <delallea@iro> |
| --- | --- |
| date | Mon, 25 May 2009 11:43:50 -0400 |
| parents | bf29e201515f |
| children | 573e3370d0fa |
| files | pylearn/algorithms/sandbox/DAA_inputs_groups.py |
| diffstat | 1 files changed, 34 insertions(+), 1 deletions(-) |
```diff
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py  Mon May 25 11:42:25 2009 -0400
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py  Mon May 25 11:43:50 2009 -0400
@@ -46,12 +46,26 @@
     def __init__(self, input = None, auxinput = None,
                  in_size=None, auxin_size= None, n_hid=1,
                  regularize = False, tie_weights = False, hid_fn = 'sigmoid_act',
-                 reconstruction_cost_function=cost.cross_entropy, interface = True,**init):
+                 reconstruction_cost_function=cost.cross_entropy, interface = True,
+                 ignore_missing=False,
+                 **init):
         """
         :param regularize: WRITEME
         :param tie_weights: WRITEME
         :param hid_fn: WRITEME
         :param reconstruction_cost: Should return one cost per example (row)
+        :param ignore_missing: if True, the input will be scanned in order to
+            detect missing values, and these values will be replaced by zeros.
+            Also, the reconstruction cost will be computed only on non missing
+            components.
+            If False, the presence of missing values may cause crashes or other
+            weird and unexpected behavior.
+            Please note that this option only affects the permanent input, not
+            auxilary ones (that should never contain missing values). In fact,
+            in the current implementation, auxiliary inputs cannot be used when
+            this option is True.
+            Another side effect of the current crappy way it is implemented is
+            that the reconstruction cost is not properly computed.
         :todo: Default noise level for all daa levels
         """
         print '\t\t**** DAAig.__init__ ****'
@@ -72,12 +86,19 @@
         self.tie_weights = tie_weights
         self.reconstruction_cost_function = reconstruction_cost_function
         self.interface = interface
+        self.ignore_missing = ignore_missing
 
         assert hid_fn in ('sigmoid_act','tanh_act','softsign_act')
         self.hid_fn = eval(hid_fn)
 
         ### DECLARE MODEL VARIABLES and default
         self.input = input
+        if self.ignore_missing and self.input is not None:
+            no_missing = fill_missing_with_zeros(self.input)
+            self.input = no_missing[0] # Missing values replaced by zeros.
+            self.input_missing_mask = no_missing[1] # Missingness pattern.
+        else:
+            self.input_missing_mask = None
         self.noisy_input = None
         self.auxinput = auxinput
         self.idx_list = T.ivector('idx_list') if not(self.auxinput is None) else None
@@ -243,6 +264,18 @@
         return self.random.binomial(T.shape(self.input), 1, 1 - self.noise_level) * self.input
 
     def reconstruction_costs(self, rec):
+        if self.ignore_missing and self.input is not None:
+            # Note: the following code is very ugly. It is just a hack to
+            # ensure that the gradient w.r.t. missing coordinates is (close to)
+            # zero. It is neither efficient nor elegant.
+            # The idea is to put a very big negative value in the
+            # reconstruction for these missing inputs (whose target is 0), so
+            # that the gradient is 1/(1 - rec) ~= 0.
+            # This will in particular screw up the cost computations.
+            zero = rec * 0
+            rec = (rec * T.neq(self.input_missing_mask, zero)
+                   + (zero - 1e100) * T.eq(self.input_missing_mask, zero))
+
         if self.input is None:
             return self.reconstruction_cost_function(scaninputs(self.idx_list,self.auxinput), rec)
         if self.auxinput is None:
```
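To make the new option concrete, here is a minimal NumPy sketch of the zero-filling step. `fill_missing_with_zeros` is a pylearn helper whose definition is not part of this changeset; the stand-in below is hypothetical and assumes missing values are encoded as NaN, returning the filled matrix together with a missingness mask (nonzero = observed, 0 = missing), which is the convention the hack in `reconstruction_costs` relies on.

```python
import numpy as np

def fill_missing_with_zeros(x):
    """Hypothetical stand-in for the pylearn helper of the same name
    (its real definition is not shown in this changeset). Assumes
    missing entries are encoded as NaN."""
    missing = np.isnan(x)
    filled = np.where(missing, 0.0, x)   # missing values replaced by zeros
    mask = (~missing).astype(x.dtype)    # 1 where observed, 0 where missing
    return filled, mask

# One example (row) whose second component is missing:
x = np.array([[0.9, np.nan, 0.1]])
filled, mask = fill_missing_with_zeros(x)
# filled -> [[0.9  0.   0.1]]    mask -> [[1. 0. 1.]]
```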
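The hack in `reconstruction_costs` can also be checked numerically. For cross-entropy, the per-component gradient with respect to the reconstruction `rec` at a coordinate whose (filled) target is 0 is 1 / (1 - rec), so overwriting `rec` with a huge negative number drives that gradient to roughly zero, at the price of a meaningless cost term there, exactly as the commit's comments admit. The values below are made up, and `mask` uses the 0 = missing convention from the sketch above.

```python
import numpy as np

rec = np.array([[0.8, 0.5, 0.2]])   # made-up reconstruction values
mask = np.array([[1.0, 0.0, 1.0]])  # 0 marks the missing coordinate

# NumPy equivalent of the Theano expression in the patch:
# rec * T.neq(mask, 0) + (0 - 1e100) * T.eq(mask, 0)
rec_hacked = rec * (mask != 0) + (-1e100) * (mask == 0)

# Cross-entropy gradient w.r.t. rec when the target is 0: 1 / (1 - rec).
grad_t0 = 1.0 / (1.0 - rec_hacked)
# grad_t0 -> [[5.0, 1e-100, 1.25]]: effectively no gradient flows
# through the missing coordinate, which is the point of the hack.
```

An idiomatic NumPy equivalent of the masking expression would be `np.where(mask != 0, rec, -1e100)`; the patch spells it out arithmetically over symbolic Theano tensors.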