# HG changeset patch
# User Joseph Turian
# Date 1229723211 18000
# Node ID 3f9ec536f2c166ba4e4b495cfdbea6ecf8ceae30
# Parent f8d29730f1462146733c2a0d55fef9515cd18e48
Two minor fixes.

diff -r f8d29730f146 -r 3f9ec536f2c1 pylearn/algorithms/cost.py
--- a/pylearn/algorithms/cost.py	Wed Dec 17 18:16:30 2008 -0500
+++ b/pylearn/algorithms/cost.py	Fri Dec 19 16:46:51 2008 -0500
@@ -10,7 +10,6 @@
 """

 import theano.tensor as T
-from xlogx import xlogx

 def quadratic(target, output, axis=1):
     return T.mean(T.sqr(target - output), axis=axis)
@@ -28,5 +27,5 @@
     different shapes then the result will be garbled.
     """
     return -(target * T.log(output) + (1 - target) * T.log(1 - output)) \
-            + (xlogx(target) + xlogx(1 - target))
+            + (T.xlogx(target) + T.xlogx(1 - target))
 #    return cross_entropy(target, output, axis) - cross_entropy(target, target, axis)
diff -r f8d29730f146 -r 3f9ec536f2c1 pylearn/algorithms/logistic_regression.py
--- a/pylearn/algorithms/logistic_regression.py	Wed Dec 17 18:16:30 2008 -0500
+++ b/pylearn/algorithms/logistic_regression.py	Fri Dec 19 16:46:51 2008 -0500
@@ -48,7 +48,7 @@
         else:
             # TODO: when above is fixed, remove this hack (need an argmax
             # which is independent of targets)
-            self.argmax_standalone = T.argmax(self.linear_output);
+            self.argmax_standalone = T.argmax(self.linear_output)
         (self._xent, self.softmax, self._max_pr, self.argmax) =\
                 nnet.crossentropy_softmax_max_and_argmax_1hot(
                 self.linear_output, self.target)