changeset 722:e915f5c9bb21

Added more descriptive comments to cross_entropy and KL_divergence.
author Joseph Turian <turian@iro.umontreal.ca>
date Tue, 26 May 2009 17:39:43 -0400
parents df3aef87d8d2
children 2881c67026c1
files pylearn/algorithms/cost.py
diffstat 1 files changed, 4 insertions(+), 0 deletions(-)
--- a/pylearn/algorithms/cost.py	Mon May 25 23:13:56 2009 -0400
+++ b/pylearn/algorithms/cost.py	Tue May 26 17:39:43 2009 -0400
@@ -17,6 +17,8 @@
 
 def cross_entropy(target, output, mean_axis=0, sum_axis=1):
     """
+    This is the cross-entropy over a binomial event, in which each dimension
+    is an independent binomial trial.
     @todo: This is essentially duplicated as nnet_ops.binary_crossentropy
     @warning: OUTPUT and TARGET are reversed in nnet_ops.binary_crossentropy
     """
@@ -25,6 +27,8 @@
 
 def KL_divergence(target, output):
     """
+    This is the KL divergence over a binomial event, in which each dimension
+    is an independent binomial trial.
     @note: We do not compute the mean, because if target and output have
     different shapes then the result will be garbled.
     """