changeset 1307:bc41fd23db25

Ported 4 cost formulas from the XG GitHub repository
author boulanni <nicolas_boulanger@hotmail.com>
date Mon, 04 Oct 2010 12:53:19 -0400
parents ef0f3deead94
children d5e536338b69
files pylearn/formulas/costs.py
diffstat 1 files changed, 57 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- a/pylearn/formulas/costs.py	Fri Oct 01 15:27:26 2010 -0400
+++ b/pylearn/formulas/costs.py	Mon Oct 04 12:53:19 2010 -0400
@@ -22,3 +22,60 @@
 
 # This file seems like it has some overlap with theano.tensor.nnet.  Which functions should go
 # in which file?
+
+@tags('cost','binary','cross-entropy', 'sigmoid')
+def sigmoid_crossentropy(output_act, target):
+    """ Stable crossentropy of a sigmoid activation (NOTE(review): per the formula below this returns t*log(sigma)+(1-t)*log(1-sigma), i.e. the negative of the conventional cross-entropy loss -- confirm the sign convention expected by callers)
+
+    .. math::
+                L_{CE} \equiv t\log(\sigma(a)) + (1-t)\log(1-\sigma(a))
+
+    :type output_act: Theano variable
+    :param output_act: Activation
+    :type target: Theano variable
+    :param target: Binary target usually :math:`\in\{0,1\}`
+    """
+    return target * (- T.log(1.0 + T.exp(-output_act))) + (1.0 - target) * (- T.log(1.0 + T.exp(output_act)))
+
+@tags('cost','binary','cross-entropy', 'tanh')
+def tanh_crossentropy(output_act, target):
+    """ Stable crossentropy of a tanh activation
+
+    .. math::
+                L_{CE} \equiv t\log(\\frac{1+\\tanh(a)}2) + (1-t)\log(\\frac{1-\\tanh(a)}2)
+
+    :type output_act: Theano variable
+    :param output_act: Activation
+    :type target: Theano variable
+    :param target: Binary target usually :math:`\in\{0,1\}`
+    """
+    return sigmoid_crossentropy(2.0*output_act, target)
+
+@tags('cost','binary','cross-entropy', 'tanh', 'abs')
+def abstanh_crossentropy(output_act, target):
+    """ Stable crossentropy of an absolute-value tanh activation
+
+    .. math::
+                L_{CE} \equiv t\log(\\frac{1+\\tanh(|a|)}2) + (1-t)\log(\\frac{1-\\tanh(|a|)}2)
+
+    :type output_act: Theano variable
+    :param output_act: Activation
+    :type target: Theano variable
+    :param target: Binary target usually :math:`\in\{0,1\}`
+    """
+    return tanh_crossentropy(T.abs_(output_act), target)
+
+@tags('cost','binary','cross-entropy', 'tanh', "normalized")
+def normtanh_crossentropy(output_act, target):
+    """ Stable crossentropy of a "normalized" tanh activation (LeCun)
+
+    .. math::
+                L_{CE} \equiv t\log(\\frac{1+\\tanh(0.6666a)}2) + (1-t)\log(\\frac{1-\\tanh(0.6666a)}2)
+
+    :type output_act: Theano variable
+    :param output_act: Activation
+    :type target: Theano variable
+    :param target: Binary target usually :math:`\in\{0,1\}`
+    """
+    return tanh_crossentropy(0.6666*output_act, target)
+