# HG changeset patch
# User Razvan Pascanu
# Date 1264780899 18000
# Node ID cb47cbc95a214580328a21c713165cdbdce2db11
# Parent  afdd41db8152144849dbce25c5434450664cb275
I fixed a bug in the computation of L1 and L2 regularizations

diff -r afdd41db8152 -r cb47cbc95a21 scripts/deepmlp.py
--- a/scripts/deepmlp.py	Thu Jan 28 23:03:44 2010 -0600
+++ b/scripts/deepmlp.py	Fri Jan 29 11:01:39 2010 -0500
@@ -102,8 +102,8 @@
         # L1 norm ; one regularization option is to enforce L1 norm to
         # be small
-        self.L1=abs(self.W[i]).sum()
-        self.L2_sqr=abs(self.W[i]).sum()
+        self.L1=abs(self.W[0]).sum()
+        self.L2_sqr=abs(self.W[0]).sum()
         for i in range(1,n_layer+1):
             self.L1 += abs(self.W[i]).sum()
         # square of L2 norm ; one regularization option is to enforce
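
For reference, a minimal standalone NumPy sketch of what these regularization
terms presumably compute. Here `W` stands in for self.W, assumed to be a list
of n_layer+1 weight matrices; the function name `regularizers` is hypothetical
and not part of deepmlp.py. Note that even after this patch, self.L2_sqr is
seeded with abs(...).sum(), i.e. an L1-style term; the squared L2 norm is
normally a sum of squared weights, as written below.

import numpy as np

def regularizers(W):
    # L1 norm: sum of absolute values of all weights, accumulated
    # over the layers as in the patched loop.
    L1 = sum(abs(Wi).sum() for Wi in W)
    # Squared L2 norm: sum of squared weights (the usual definition,
    # assumed here; the patched line still uses abs(...).sum()).
    L2_sqr = sum((Wi ** 2).sum() for Wi in W)
    return L1, L2_sqr

# Example usage with random weights for a three-layer net:
rng = np.random.RandomState(0)
W = [rng.randn(5, 4), rng.randn(4, 4), rng.randn(4, 2)]
L1, L2_sqr = regularizers(W)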