# HG changeset patch
# User Xavier Glorot
# Date 1250177967 14400
# Node ID ecae21a7262ef03d0645e1c8cd4b4ae508c6f758
# Parent 3a4bc4a0dbf4ff9bdf1ade2c4a0d8a4be6eea566
Added a rescalwsaturation instance method for DAA inputs groups

diff -r 3a4bc4a0dbf4 -r ecae21a7262e pylearn/algorithms/sandbox/DAA_inputs_groups.py
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Wed Aug 12 19:19:24 2009 -0400
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Thu Aug 13 11:39:27 2009 -0400
@@ -540,7 +540,7 @@
 
         # facultative methods
         if self.debugmethod:
-            self.activation = [None] * (self.depth)
+            self.activation = [None] * (self.depth+1)
            self.representation = [None] * (self.depth)
            self.recactivation = [None] * (self.depth)
            self.reconstruction = [None] * (self.depth)
@@ -706,6 +706,7 @@
        self.NLL = theano.Method(self.inputs[-1],self.daaig[-1]._xent)
 
        if self.debugmethod:
+            self.activation[-1] = theano.Method(self.inputs[-2],self.daaig[-1].linear_output)
            self.compute_localcost[-1] = theano.Method(self.inputs[-1],self.localcost[-1])
            self.compute_localgradients[-1] = theano.Method(self.inputs[-1],self.localgradients[-1])
            self.compute_globalcost[-1] = theano.Method(self.inputs[-1],self.globalcost[-1])
@@ -827,6 +828,23 @@
    def _instance_nll(self,inst,inputs,target):
        return numpy.sum(inst.NLL(*(inputs+[target]))) / float(len(target))
 
+    #try--------------------------------------------------------------------
+    def _instance_rescalwsaturation(self,inst,inputs):
+        sat = [None]*(self.depth+1)
+        for i in range(self.depth+1):
+            sat[i] = inst.hidsaturation(i,inputs[min(i,self.depth)])
+
+        for i in range(self.depth-1):
+            if sat[i+1] > max(sat[:i+1]):
+                inst.daaig[i+1].wenc = inst.daaig[i+1].wenc/sat[i+1]*max(sat[:i+1])
+                inst.daaig[i+1].benc = inst.daaig[i+1].benc/sat[i+1]*max(sat[:i+1])
+                sat[i+1] = max(sat[:i+1])
+        if sat[-1] > max(sat[:-1]):
+            inst.daaig[-1].w = inst.daaig[-1].w/sat[-1]*max(sat[:-1])
+            inst.daaig[-1].b = inst.daaig[-1].b/sat[-1]*max(sat[:-1])
+
+    #-----------------------------------------------------------------------
+
    def _instance_unsupgrad(self,inst,inputs,layer,param_name):
        inst.noiseseed(0)
        gradin = inst.compute_localgradients_in[layer](*inputs)
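
Note for readers of this patch: the new _instance_rescalwsaturation method measures a saturation statistic per layer (via the instance's hidsaturation method) and, whenever a layer saturates more than every layer below it, divides that layer's encoder weights and biases by its own saturation and multiplies by the running maximum, capping the recorded value as it goes; the top logistic-regression layer is handled the same way through its w/b parameters. Below is a minimal standalone sketch of that rescaling rule in plain numpy, not the pylearn API: the FakeLayer class, the toy saturation measure, and all names are illustrative assumptions.

import numpy

class FakeLayer(object):
    """Hypothetical stand-in for one DAA layer: an encoder weight matrix and bias."""
    def __init__(self, rng, n_in, n_out):
        self.wenc = rng.uniform(-1, 1, (n_in, n_out))
        self.benc = numpy.zeros(n_out)

def saturation(layer, x):
    """Toy saturation measure: mean absolute pre-activation on a batch x."""
    return float(numpy.mean(numpy.abs(numpy.dot(x, layer.wenc) + layer.benc)))

def rescale_by_saturation(layers, inputs):
    """Shrink each layer's encoder parameters so its saturation does not exceed
    the maximum saturation of the layers below it (same rule as the patch, with
    the last-layer special case folded into the loop)."""
    sat = [saturation(layer, x) for layer, x in zip(layers, inputs)]
    for i in range(1, len(layers)):
        cap = max(sat[:i])
        if sat[i] > cap:
            layers[i].wenc = layers[i].wenc / sat[i] * cap
            layers[i].benc = layers[i].benc / sat[i] * cap
            sat[i] = cap          # later layers compare against the capped value
    return sat

if __name__ == '__main__':
    rng = numpy.random.RandomState(0)
    sizes = [20, 15, 10, 5]
    layers = [FakeLayer(rng, sizes[i], sizes[i + 1]) for i in range(len(sizes) - 1)]
    inputs = [rng.uniform(-1, 1, (8, sizes[i])) for i in range(len(sizes) - 1)]
    print(rescale_by_saturation(layers, inputs))

In the real code the saturation values come from a compiled theano Method on the module instance and the inputs are the training data; the toy statistic above only serves to keep the example self-contained.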