# HG changeset patch
# User Xavier Glorot
# Date 1245629783 14400
# Node ID 2c159439c47ce790add1f87b2fa53285ca2a2dd9
# Parent  a985baadf74d5378278a422208db96ad5b6f2cfb
Sigmoid before logistic regression to avoid small gradient with tanh in StackedDAAig

diff -r a985baadf74d -r 2c159439c47c pylearn/algorithms/sandbox/DAA_inputs_groups.py
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Sat Jun 13 22:02:13 2009 -0400
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Sun Jun 21 20:16:23 2009 -0400
@@ -578,7 +578,7 @@
         # supervised layer
         print '\tLayer supervised init'
         self.inputs[-1] = copy.copy(self.inputs[-2])+[self.target]
-        self.daaig[-1] = LogRegN(in_sizeprec,self.n_out,inputprec,self.target)
+        self.daaig[-1] = LogRegN(in_sizeprec,self.n_out,sigmoid_act(self.daaig[-2].clean.hidden_activation),self.target)
         paramstot += self.daaig[-1].params
         
         if self.regularize:
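
Editor's note on the rationale in the summary line: the patch feeds the supervised LogRegN layer a sigmoid of the previous layer's pre-nonlinearity hidden activation instead of the tanh output it received before (assuming, as the name suggests, that sigmoid_act applies the logistic function elementwise). The "small gradient with tanh" claim can be checked numerically: since tanh(x) = 2*sigmoid(2x) - 1, its derivative is 4*sigmoid'(2x), which is larger near zero but decays roughly twice as fast once pre-activations saturate. The following standalone NumPy sketch, independent of the pylearn code, illustrates this.

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def d_sigmoid(x):
    # Derivative of the logistic sigmoid: s * (1 - s).
    s = sigmoid(x)
    return s * (1.0 - s)

def d_tanh(x):
    # Derivative of tanh: 1 - tanh(x)**2. Note the identity
    # d_tanh(x) == 4 * d_sigmoid(2 * x), so tanh's gradient
    # vanishes about twice as fast under saturation.
    return 1.0 - np.tanh(x) ** 2

# Compare gradient magnitudes as the pre-activation saturates.
for x in (0.0, 1.0, 2.0, 4.0, 6.0):
    print("x=%3.1f  d_tanh=%.6f  d_sigmoid=%.6f" % (x, d_tanh(x), d_sigmoid(x)))

At x = 4 this prints d_tanh ~ 0.0013 against d_sigmoid ~ 0.0177, an order of magnitude less gradient flowing into the supervised layer, which is consistent with the commit's motivation for switching to a sigmoid activation before the logistic regression.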