# HG changeset patch
# User James Bergstra
# Date 1257954435 18000
# Node ID fafe796ad5fff46a99dc42e9d28661a2b38d737e
# Parent  bd7d540db70d990a03b1961875c5a6baa0ccd226
# Parent  2fffbfa419202cd8f7efcd296d7060d7dc36a21e
merge

diff -r 2fffbfa41920 -r fafe796ad5ff pylearn/algorithms/logistic_regression.py
--- a/pylearn/algorithms/logistic_regression.py	Tue Nov 10 17:59:54 2009 -0500
+++ b/pylearn/algorithms/logistic_regression.py	Wed Nov 11 10:47:15 2009 -0500
@@ -101,7 +101,7 @@
             nnet.crossentropy_softmax_max_and_argmax_1hot(
                 self.linear_output,
                 self.target)
-        self.unregularized_cost = T.sum(self._xent)
+        self.unregularized_cost = T.mean(self._xent)
         self.l1_cost = self.l1 * T.sum(abs(self.w))
         self.l2_cost = self.l2 * T.sum(self.w**2)
         self.regularized_cost = self.unregularized_cost + self.l1_cost + self.l2_cost
@@ -245,12 +245,12 @@
 
         output = nnet.sigmoid(T.dot(self.x, self.w) + self.b)
         xent = -self.targ * T.log(output) - (1.0 - self.targ) * T.log(1.0 - output)
-        sum_xent = T.sum(xent)
+        mean_xent = T.mean(xent)
 
         self.output = output
         self.xent = xent
-        self.sum_xent = sum_xent
-        self.cost = sum_xent
+        self.mean_xent = mean_xent
+        self.cost = mean_xent
 
         #define the apply method
         self.pred = (T.dot(self.input, self.w) + self.b) > 0.0
@@ -258,8 +258,8 @@
 
         #if this module has any internal parameters, define an update function for them
         if self.params:
-            gparams = T.grad(sum_xent, self.params)
-            self.update = module.Method([self.input, self.targ], sum_xent,
+            gparams = T.grad(mean_xent, self.params)
+            self.update = module.Method([self.input, self.targ], mean_xent,
                     updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))
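
The substantive change in this merge is replacing the summed cross-entropy with the mean
cross-entropy as the training cost. The practical effect is that the gradient magnitude, and
hence the effective SGD step lr * g, no longer scales with minibatch size. Below is a minimal
standalone sketch of that effect, not part of the patch or of pylearn; it reuses the same
Theano idioms as the patched module (the names x, targ, w, b, grad_sum, grad_mean are
illustrative choices, not identifiers from the repository).

    # Standalone sketch (assumption: plain Theano, no pylearn): compare the
    # gradient of a summed cross-entropy cost with that of a mean cost as the
    # minibatch size grows.
    import numpy
    import theano
    import theano.tensor as T
    from theano.tensor import nnet

    x = T.dmatrix('x')        # minibatch of inputs, one row per example
    targ = T.dvector('targ')  # binary targets in {0, 1}
    w = theano.shared(numpy.zeros(3), name='w')
    b = theano.shared(0.0, name='b')

    # same model and cross-entropy expression as the patched module
    output = nnet.sigmoid(T.dot(x, w) + b)
    xent = -targ * T.log(output) - (1.0 - targ) * T.log(1.0 - output)

    # gradient of the summed cost vs. the mean cost, w.r.t. the weights
    grad_sum = theano.function([x, targ], T.grad(T.sum(xent), w))
    grad_mean = theano.function([x, targ], T.grad(T.mean(xent), w))

    rng = numpy.random.RandomState(0)
    for n in (10, 100):
        xv = rng.randn(n, 3)
        tv = (rng.rand(n) > 0.5).astype('float64')
        # the summed-cost gradient grows roughly linearly with n, so the
        # learning rate would have to be retuned for every batch size;
        # the mean-cost gradient stays on the same scale
        print(n, abs(grad_sum(xv, tv)).sum(), abs(grad_mean(xv, tv)).sum())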