Mercurial > pylearn
changeset 765:c95a56f055aa
added a totallocalupdate method to StackedDAAig
author | Xavier Glorot <glorotxa@iro.umontreal.ca> |
---|---|
date | Fri, 05 Jun 2009 15:11:53 -0400 |
parents | f02dc24dad8f |
children | d7d8877e03f8 |
files | pylearn/algorithms/sandbox/DAA_inputs_groups.py |
diffstat | 1 files changed, 9 insertions(+), 0 deletions(-) [+] |
line wrap: on
line diff
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Wed Jun 03 14:25:56 2009 -0400
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Fri Jun 05 15:11:53 2009 -0400
@@ -607,8 +607,17 @@
             self.globalupdate[-1] = theano.Method(self.inputs[-1],self.globalcost[-1],global_grads)
             if self.totalupdatebool:
                 self.totalupdate[-1] = theano.Method(self.inputs[-1],self.totalcost[-1],total_grads)
+
+        totallocal_grads={}
+        for k in range(self.depth):
+            totallocal_grads.update(dict((j, j - self.unsup_lr * g) for j,g in
+                    zip(self.daaig[k].params,self.localgradients[k])))
+        totallocal_grads.update(dict((j, j - self.sup_lr * g) for j,g in zip(self.daaig[-1].params,self.localgradients[-1])))
+        self.totallocalupdate = theano.Method(self.inputs[-1],sum(self.localcost[:]),totallocal_grads)
+
         self.classify = theano.Method(self.inputs[-2],self.daaig[-1].argmax_standalone)
         self.NLL = theano.Method(self.inputs[-1],self.daaig[-1]._xent)
+
         if self.debugmethod:
             self.compute_localcost[-1] = theano.Method(self.inputs[-1],self.localcost[-1])