# HG changeset patch
# User Xavier Glorot
# Date 1242924881 14400
# Node ID 521c04f8d2b349edbb964d825662a9275b9c3f83
# Parent  9078561a7c21c98f476d01228bf6cd8ce3498a5c
better definition of parameters for DAAig

diff -r 9078561a7c21 -r 521c04f8d2b3 pylearn/algorithms/sandbox/DAA_inputs_groups.py
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Thu May 21 12:18:36 2009 -0400
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Thu May 21 12:54:41 2009 -0400
@@ -175,10 +175,10 @@
         self.Maskdown = [self.Maskdown]
         listweights = []
         listweightsenc = []
-        if self.input is None:
+        if not(self.auxinput is None):
             listweights += [w*m for w,m in zip(self.Maskup,self.wauxenc)] + [w*m for w,m in zip(self.Maskdown,self.wauxdec)]
             listweightsenc += [w*m for w,m in zip(self.Maskup,self.wauxenc)]
-        if self.auxinput is None:
+        if not(self.input is None):
             listweights += [self.wenc,self.wdec]
             listweightsenc += [self.wenc]
         self.regularization = self.reg_coef * get_reg_cost(listweights,'l2')
@@ -198,16 +198,18 @@
         if not hasattr(self,'params'):
             self.params = []
         self.params += [self.benc]
+        self.paramsenc = self.params
         if not(self.input is None):
             self.params += [self.wenc] + [self.bdec]
+            self.paramsenc += [self.wenc]
         if not(self.auxinput is None):
             self.params += self.wauxenc + self.bauxdec
-        self.paramsenc = self.params
+            self.paramsenc += self.wauxenc
         if not(self.tie_weights):
             if not(self.input is None):
-                self.params += [self.bdec]
+                self.params += [self.wdec]
             if not(self.auxinput is None):
-                self.params += self.wauxdec + self.bauxdec
+                self.params += self.wauxdec
 
     # DEPENDENCY: define_cost, define_gradients
     def define_gradients(self):
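
A minimal standalone sketch of the Python list semantics the second hunk relies on (illustrative only, not part of the changeset; the variable names params and paramsenc mirror the attributes in the patch):

    # After `paramsenc = params`, both names are bound to the same list
    # object; assignment does not copy. An in-place `+=` (list.extend)
    # through either name is therefore visible through both.
    params = ['benc']            # stands in for self.params after the first +=
    paramsenc = params           # alias, as in: self.paramsenc = self.params

    params += ['wenc', 'bdec']   # in-place extend mutates the shared list
    print(paramsenc)             # ['benc', 'wenc', 'bdec'] -- the alias sees it

    paramsenc += ['wenc']        # likewise extends the same shared list
    print(params)                # ['benc', 'wenc', 'bdec', 'wenc']

    # A fully independent encoder-parameter list would instead need an
    # explicit copy at the point of assignment:
    # paramsenc = list(params)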