changeset 871:fafe796ad5ff

merge
author James Bergstra <bergstrj@iro.umontreal.ca>
date Wed, 11 Nov 2009 10:47:15 -0500
parents bd7d540db70d 2fffbfa41920
children b2821fce15de
files pylearn/algorithms/logistic_regression.py
diffstat 1 files changed, 6 insertions(+), 6 deletions(-)
--- a/pylearn/algorithms/logistic_regression.py	Tue Nov 10 17:59:54 2009 -0500
+++ b/pylearn/algorithms/logistic_regression.py	Wed Nov 11 10:47:15 2009 -0500
@@ -101,7 +101,7 @@
                     nnet.crossentropy_softmax_max_and_argmax_1hot(
                     self.linear_output, self.target)
 
-        self.unregularized_cost = T.sum(self._xent)
+        self.unregularized_cost = T.mean(self._xent)
         self.l1_cost = self.l1 * T.sum(abs(self.w))
         self.l2_cost = self.l2 * T.sum(self.w**2)
         self.regularized_cost = self.unregularized_cost + self.l1_cost + self.l2_cost
@@ -245,12 +245,12 @@
 
         output = nnet.sigmoid(T.dot(self.x, self.w) + self.b)
         xent = -self.targ * T.log(output) - (1.0 - self.targ) * T.log(1.0 - output)
-        sum_xent = T.sum(xent)
+        mean_xent = T.mean(xent)
 
         self.output = output
         self.xent = xent
-        self.sum_xent = sum_xent
-        self.cost = sum_xent
+        self.mean_xent = mean_xent
+        self.cost = mean_xent
 
         #define the apply method
         self.pred = (T.dot(self.input, self.w) + self.b) > 0.0
@@ -258,8 +258,8 @@
 
         #if this module has any internal parameters, define an update function for them
         if self.params:
-            gparams = T.grad(sum_xent, self.params)
-            self.update = module.Method([self.input, self.targ], sum_xent,
+            gparams = T.grad(mean_xent, self.params)
+            self.update = module.Method([self.input, self.targ], mean_xent,
                                         updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))
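The substance of the merge is the switch from a summed to an averaged cross-entropy cost, in both the softmax (multi-class) module and the sigmoid (binary) module. With T.sum the gradient magnitude grows linearly with the minibatch size, so a learning rate tuned for one batch size misbehaves at another; with T.mean the gradient scale is batch-size independent. It also keeps the data term on a fixed scale relative to the l1_cost and l2_cost penalties, which are sums over the weights and do not grow with the batch. Below is a minimal standalone sketch of the effect; it is not part of the changeset, the variable names (x, targ, w, b) only loosely mirror the module above, and the script itself is hypothetical.

# Sketch: compare gradients of mean vs. sum cross-entropy at two batch sizes.
import numpy
import theano
import theano.tensor as T

x = T.matrix('x')        # minibatch of examples, one per row
targ = T.vector('targ')  # binary targets in {0, 1}
w = theano.shared(numpy.zeros(3), name='w')
b = theano.shared(0.0, name='b')

output = T.nnet.sigmoid(T.dot(x, w) + b)
xent = -targ * T.log(output) - (1.0 - targ) * T.log(1.0 - output)

mean_cost = T.mean(xent)  # per-example cost: gradient scale fixed w.r.t. batch size
sum_cost = T.sum(xent)    # total cost: gradient scales with batch size

g_mean = T.grad(mean_cost, w)
g_sum = T.grad(sum_cost, w)
f = theano.function([x, targ], [g_mean, g_sum])

rng = numpy.random.RandomState(0)
X = rng.randn(8, 3)
Y = (rng.uniform(size=8) > 0.5).astype('float64')

gm_small, gs_small = f(X[:4], Y[:4])   # batch of 4
gm_full, gs_full = f(X, Y)             # batch of 8

# gm_small and gm_full are on the same scale, while gs_full is roughly
# twice gs_small: with T.sum, doubling the batch roughly doubles the
# gradient, so a fixed lr in the update rule above would effectively
# double the step size as well.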