Mercurial > pylearn
changeset 777:ba055d419bcf
change seed initialisation for random object and weight init for logistic regression
author | Xavier Glorot <glorotxa@iro.umontreal.ca> |
---|---|
date | Sat, 13 Jun 2009 10:11:43 -0400 |
parents | 41761210d16e |
children | a985baadf74d |
files | pylearn/algorithms/sandbox/DAA_inputs_groups.py |
diffstat | 1 files changed, 7 insertions(+), 2 deletions(-) [+] |
line wrap: on
line diff
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py Thu Jun 11 11:37:24 2009 -0400 +++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py Sat Jun 13 10:11:43 2009 -0400 @@ -557,7 +557,7 @@ self.totalupdate[i] = theano.Method(self.inputs[i],self.totalcost[i],total_grads) # if self.debugmethod: - self.representation[i] = theano.Method(self.inputs[i],self.daaig[i].clean.hidden) + self.representation[i] = theano.Method(self.inputs[i],self.daaig[i].clean.hidden_activation) self.reconstruction[i] = theano.Method(self.inputs[i],self.daaig[i].clean.rec) self.validate[i] =theano.Method(self.inputs[i], [self.daaig[i].clean.cost, self.daaig[i].clean.rec]) self.noisyinputs[i] =theano.Method(self.inputs[i], noisyout) @@ -639,8 +639,13 @@ for i in range(self.depth): print '\tLayer = ', i+1 inst.daaig[i].initialize(reg_coef = reg_coef, noise_level = noise_level,\ - noise_level_group = noise_level_group, seed = seed, alloc = alloc) + noise_level_group = noise_level_group, seed = seed + i, alloc = alloc) print '\tLayer supervised' inst.daaig[-1].initialize() + if alloc: + inst.daaig[-1].R = numpy.random.RandomState(seed+self.depth) + # init the logreg weights + inst.daaig[-1].w = inst.daaig[-1].R.uniform(size=inst.daaig[-1].w.shape,\ + low = -1/numpy.sqrt(inst.daaig[-2].n_hid), high = 1/numpy.sqrt(inst.daaig[-2].n_hid)) inst.daaig[-1].l1 = 0 inst.daaig[-1].l2 = reg_coef #only l2 norm for regularisation to be consitent with the unsup regularisation