# HG changeset patch
# User James Bergstra
# Date 1232571727 18000
# Node ID 83397981a118c59add2a1ee333432f3d3d029bef
# Parent 9fb784c1f23d1bd2ebd5a47ea032d6d42ed833ca
# Parent 89bc88affef047e62b1858f3876c944d6ebf7319
merge

diff -r 9fb784c1f23d -r 83397981a118 pylearn/algorithms/cost.py
--- a/pylearn/algorithms/cost.py	Wed Jan 21 16:01:16 2009 -0500
+++ b/pylearn/algorithms/cost.py	Wed Jan 21 16:02:07 2009 -0500
@@ -14,12 +14,13 @@
 def quadratic(target, output, axis=1):
     return T.mean(T.sqr(target - output), axis=axis)
 
-def cross_entropy(target, output, axis=1):
+def cross_entropy(target, output, mean_axis=0, sum_axis=1):
     """
     @todo: This is essentially duplicated as nnet_ops.binary_crossentropy
     @warning: OUTPUT and TARGET are reversed in nnet_ops.binary_crossentropy
     """
-    return -T.mean(target * T.log(output) + (1 - target) * T.log(1 - output), axis=axis)
+    XE = target * T.log(output) + (1 - target) * T.log(1 - output)
+    return -T.mean(T.sum(XE, axis=sum_axis), axis=mean_axis)
 
 def KL_divergence(target, output):
     """
diff -r 9fb784c1f23d -r 83397981a118 pylearn/algorithms/daa.py
diff -r 9fb784c1f23d -r 83397981a118 pylearn/algorithms/logistic_regression.py
diff -r 9fb784c1f23d -r 83397981a118 pylearn/algorithms/stacker.py
diff -r 9fb784c1f23d -r 83397981a118 pylearn/algorithms/tests/test_daa.py
diff -r 9fb784c1f23d -r 83397981a118 pylearn/dbdict/api0.py
diff -r 9fb784c1f23d -r 83397981a118 pylearn/dbdict/sql_commands.py
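
The cost.py hunk above changes cross_entropy from a mean over a single axis to a sum of per-element cross-entropies over sum_axis followed by a mean over mean_axis, so a 2-D (batch, features) minibatch now yields a scalar cost rather than a per-example vector. A minimal sketch of that difference, assuming Theano as imported in cost.py (import theano.tensor as T); the input data and variable names here are illustrative only, not part of the changeset:

import numpy
import theano
import theano.tensor as T

target = T.dmatrix('target')   # shape (batch, features)
output = T.dmatrix('output')   # shape (batch, features)

# Per-element binary cross-entropy, as in the patched cost.py.
XE = target * T.log(output) + (1 - target) * T.log(1 - output)

# Old behavior (axis=1): mean over features, one value per example.
old_cost = -T.mean(XE, axis=1)
# New behavior (sum_axis=1, mean_axis=0): sum over features,
# then mean over the minibatch, giving a scalar.
new_cost = -T.mean(T.sum(XE, axis=1), axis=0)

f = theano.function([target, output], [old_cost, new_cost])

t = numpy.array([[1.0, 0.0], [0.0, 1.0]])
o = numpy.array([[0.9, 0.2], [0.1, 0.8]])
per_example, scalar = f(t, o)   # vector of length 2, and a scalar
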