# HG changeset patch
# User Foo Bar
# Date 1244944933 14400
# Node ID a985baadf74d5378278a422208db96ad5b6f2cfb
# Parent ba055d419bcfcc812d0ccfd5c9fa91555f9a828d
# Parent 72ce8288a2835f6a2ff56ea67552d08002f4de17
Merge

diff -r 72ce8288a283 -r a985baadf74d pylearn/algorithms/sandbox/DAA_inputs_groups.py
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Thu Jun 11 15:44:32 2009 -0400
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Sat Jun 13 22:02:13 2009 -0400
@@ -558,7 +558,7 @@
             self.totalupdate[i] = theano.Method(self.inputs[i],self.totalcost[i],total_grads)
             #
             if self.debugmethod:
-                self.representation[i] = theano.Method(self.inputs[i],self.daaig[i].clean.hidden)
+                self.representation[i] = theano.Method(self.inputs[i],self.daaig[i].clean.hidden_activation)
                 self.reconstruction[i] = theano.Method(self.inputs[i],self.daaig[i].clean.rec)
                 self.validate[i] =theano.Method(self.inputs[i], [self.daaig[i].clean.cost, self.daaig[i].clean.rec])
                 self.noisyinputs[i] =theano.Method(self.inputs[i], noisyout)
@@ -640,8 +640,13 @@
         for i in range(self.depth):
             print '\tLayer = ', i+1
             inst.daaig[i].initialize(reg_coef = reg_coef, noise_level = noise_level,\
-                    noise_level_group = noise_level_group, seed = seed, alloc = alloc)
+                    noise_level_group = noise_level_group, seed = seed + i, alloc = alloc)
         print '\tLayer supervised'
         inst.daaig[-1].initialize()
+        if alloc:
+            inst.daaig[-1].R = numpy.random.RandomState(seed+self.depth)
+            # init the logreg weights
+            inst.daaig[-1].w = inst.daaig[-1].R.uniform(size=inst.daaig[-1].w.shape,\
+                low = -1/numpy.sqrt(inst.daaig[-2].n_hid), high = 1/numpy.sqrt(inst.daaig[-2].n_hid))
         inst.daaig[-1].l1 = 0
         inst.daaig[-1].l2 = reg_coef #only l2 norm for regularisation to be consitent with the unsup regularisation
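
Note (not part of the patch): the added hunk gives each layer its own seed (seed + i) and
initializes the supervised logistic-regression weights uniformly within
+/- 1/sqrt(n_hid) of the last hidden layer, a common fan-in-scaled heuristic.
Below is a minimal standalone sketch of that initialization; the names
(init_logreg_weights, fan_in, n_out) are illustrative and not from the pylearn
codebase.

    import numpy

    def init_logreg_weights(fan_in, n_out, seed):
        """Return a (fan_in, n_out) weight matrix drawn uniformly from
        [-1/sqrt(fan_in), 1/sqrt(fan_in)], using a seeded RandomState
        for reproducibility, as the patch does for the logreg layer."""
        rng = numpy.random.RandomState(seed)
        bound = 1.0 / numpy.sqrt(fan_in)
        return rng.uniform(low=-bound, high=bound, size=(fan_in, n_out))

    # Example: weights for a 10-class logreg layer fed by 500 hidden units.
    w = init_logreg_weights(fan_in=500, n_out=10, seed=42)
    print(w.shape)  # (500, 10); all entries within +/- 1/sqrt(500)

Scaling the bound by 1/sqrt(fan_in) keeps the initial pre-activations at
roughly unit scale regardless of layer width, which is the usual motivation
for this heuristic.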