# HG changeset patch
# User Xavier Glorot
# Date 1244902303 14400
# Node ID ba055d419bcfcc812d0ccfd5c9fa91555f9a828d
# Parent  41761210d16e7d3d374209a3644c8fb038d8090a
change seed initialisation for random object and weight init for logistic regression

diff -r 41761210d16e -r ba055d419bcf pylearn/algorithms/sandbox/DAA_inputs_groups.py
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Thu Jun 11 11:37:24 2009 -0400
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Sat Jun 13 10:11:43 2009 -0400
@@ -557,7 +557,7 @@
             self.totalupdate[i] = theano.Method(self.inputs[i],self.totalcost[i],total_grads)
         #
         if self.debugmethod:
-            self.representation[i] = theano.Method(self.inputs[i],self.daaig[i].clean.hidden)
+            self.representation[i] = theano.Method(self.inputs[i],self.daaig[i].clean.hidden_activation)
             self.reconstruction[i] = theano.Method(self.inputs[i],self.daaig[i].clean.rec)
             self.validate[i] =theano.Method(self.inputs[i], [self.daaig[i].clean.cost, self.daaig[i].clean.rec])
             self.noisyinputs[i] =theano.Method(self.inputs[i], noisyout)
@@ -639,8 +639,13 @@
         for i in range(self.depth):
             print '\tLayer = ', i+1
             inst.daaig[i].initialize(reg_coef = reg_coef, noise_level = noise_level,\
-                    noise_level_group = noise_level_group, seed = seed, alloc = alloc)
+                    noise_level_group = noise_level_group, seed = seed + i, alloc = alloc)
         print '\tLayer supervised'
         inst.daaig[-1].initialize()
+        if alloc:
+            inst.daaig[-1].R = numpy.random.RandomState(seed+self.depth)
+            # init the logreg weights
+            inst.daaig[-1].w = inst.daaig[-1].R.uniform(size=inst.daaig[-1].w.shape,\
+                    low = -1/numpy.sqrt(inst.daaig[-2].n_hid), high = 1/numpy.sqrt(inst.daaig[-2].n_hid))
         inst.daaig[-1].l1 = 0
         inst.daaig[-1].l2 = reg_coef #only l2 norm for regularisation to be consistent with the unsup regularisation
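
For context, the patch makes two changes: each unsupervised layer is now initialized with its own seed (seed + i), and the supervised logistic-regression weights are drawn from a uniform distribution scaled by the fan-in of the last hidden layer, U(-1/sqrt(n_hid), 1/sqrt(n_hid)), with the RandomState offset by the network depth so it does not collide with the per-layer seeds. A minimal standalone sketch of that scheme follows, assuming plain numpy; the function name init_logreg_weights and the sizes in the example are illustrative, not part of the patch:

    import numpy

    def init_logreg_weights(n_hid, n_out, seed, depth):
        # Seed the supervised layer past the per-layer seeds (seed + i for
        # i in range(depth)) so every layer draws from a distinct stream.
        rng = numpy.random.RandomState(seed + depth)
        # Fan-in-scaled uniform init: U(-1/sqrt(n_hid), 1/sqrt(n_hid)),
        # the same bounds the patch uses for inst.daaig[-1].w.
        bound = 1.0 / numpy.sqrt(n_hid)
        return rng.uniform(low=-bound, high=bound, size=(n_hid, n_out))

    # Illustrative use: depth-3 stack, 500 hidden units, 10 output classes.
    w = init_logreg_weights(500, 10, seed=1, depth=3)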