changeset 451:d99fefbc9324

Added a KL-divergence cost function.
author Joseph Turian <turian@gmail.com>
date Thu, 04 Sep 2008 14:46:30 -0400
parents 117e5b09cf31
children 739612d316a4
files cost.py
diffstat 1 files changed, 10 insertions(+), 0 deletions(-) [+]
--- a/cost.py	Thu Sep 04 14:46:17 2008 -0400
+++ b/cost.py	Thu Sep 04 14:46:30 2008 -0400
@@ -6,6 +6,7 @@
 """
 
 import theano.tensor as T
+from xlogx import xlogx
 
 def quadratic(target, output, axis=1):
     return T.mean(T.sqr(target - output), axis)
@@ -16,3 +17,12 @@
     @warning: OUTPUT and TARGET are reversed in nnet_ops.binary_crossentropy
     """
     return -T.mean(target * T.log(output) + (1 - target) * T.log(1 - output), axis=axis)
+
+def KL_divergence(target, output):
+    """
+    @note: We do not compute the mean, because if target and output have
+    different shapes then the result will be garbled.
+    """
+    return -(target * T.log(output) + (1 - target) * T.log(1 - output)) \
+            + (xlogx(target) + xlogx(1 - target))
+#    return cross_entropy(target, output, axis) - cross_entropy(target, target, axis)
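
The new cost is the elementwise KL divergence between Bernoulli distributions with
means target and output: KL(t || o) = t*log(t/o) + (1-t)*log((1-t)/(1-o)), i.e. the
binary cross-entropy of (target, output) minus the entropy of target, which is what
the commented-out one-liner expresses. Below is a minimal usage sketch, not part of
the changeset: it assumes cost.py and the xlogx helper it imports are on the path,
and a Theano version that exposes theano.function.

import numpy
import theano
import theano.tensor as T

from cost import KL_divergence  # the function added in this changeset

target = T.dmatrix('target')
output = T.dmatrix('output')
kl = theano.function([target, output], KL_divergence(target, output))

t = numpy.array([[0.0, 1.0, 0.5]])
o = numpy.array([[0.1, 0.9, 0.5]])
# Elementwise KL: zero where output equals target, positive elsewhere;
# the caller decides whether to sum or average over an axis.
print(kl(t, o))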