# Four crossentropy formulas for common binary output activations, ported
# from the XG github repository (hg changeset bc41fd23db25, user boulanni,
# 2010-10-04, into pylearn/formulas/costs.py).
#
# All four take the *pre-activation* value `a` (``output_act``) and a binary
# target, and all reduce to sigmoid_crossentropy via the identity
# (1 + tanh(a)) / 2 == sigmoid(2a).
# `tags` and `T` (theano.tensor) are provided by this module's imports.

@tags('cost', 'binary', 'cross-entropy', 'sigmoid')
def sigmoid_crossentropy(output_act, target):
    r"""Stable crossentropy of a sigmoid activation.

    .. math::
        L_{CE} \equiv t\log(\sigma(a)) + (1-t)\log(1-\sigma(a))

    .. note:: NOTE(review): as the formula shows, this returns the
       (non-positive) log-likelihood, *not* its negation -- a caller
       minimizing a loss must negate the result. Kept as-is for backward
       compatibility with existing callers.

    :type output_act: Theano variable
    :param output_act: activation :math:`a` (pre-sigmoid)
    :type target: Theano variable
    :param target: binary target, usually :math:`\in\{0,1\}`
    """
    # log(sigmoid(a))     = -log(1 + exp(-a))
    # log(1 - sigmoid(a)) = -log(1 + exp(a))
    # NOTE(review): exp() alone can overflow to inf for large |a|; the
    # stability claim relies on Theano's graph optimizer rewriting
    # log(1 + exp(x)) into softplus(x) -- confirm that rewrite fires.
    log_p = -T.log(1.0 + T.exp(-output_act))   # log(sigmoid(a))
    log_q = -T.log(1.0 + T.exp(output_act))    # log(1 - sigmoid(a))
    return target * log_p + (1.0 - target) * log_q

@tags('cost', 'binary', 'cross-entropy', 'tanh')
def tanh_crossentropy(output_act, target):
    r"""Stable crossentropy of a tanh activation.

    Uses :math:`\frac{1+\tanh(a)}{2} = \sigma(2a)` to delegate to
    :func:`sigmoid_crossentropy` (same log-likelihood sign convention).

    .. math::
        L_{CE} \equiv t\log(\frac{1+\tanh(a)}{2})
                    + (1-t)\log(\frac{1-\tanh(a)}{2})

    :type output_act: Theano variable
    :param output_act: activation :math:`a` (pre-tanh)
    :type target: Theano variable
    :param target: binary target, usually :math:`\in\{0,1\}`
    """
    return sigmoid_crossentropy(2.0 * output_act, target)

@tags('cost', 'binary', 'cross-entropy', 'tanh', 'abs')
def abstanh_crossentropy(output_act, target):
    r"""Stable crossentropy of an absolute-value tanh activation.

    Same as :func:`tanh_crossentropy` but applied to :math:`|a|`.

    .. math::
        L_{CE} \equiv t\log(\frac{1+\tanh(|a|)}{2})
                    + (1-t)\log(\frac{1-\tanh(|a|)}{2})

    :type output_act: Theano variable
    :param output_act: activation :math:`a`; only its magnitude is used
    :type target: Theano variable
    :param target: binary target, usually :math:`\in\{0,1\}`
    """
    return tanh_crossentropy(T.abs_(output_act), target)

@tags('cost', 'binary', 'cross-entropy', 'tanh', "normalized")
def normtanh_crossentropy(output_act, target):
    r"""Stable crossentropy of a "normalized" tanh activation (LeCun).

    .. math::
        L_{CE} \equiv t\log(\frac{1+\tanh(0.6666a)}{2})
                    + (1-t)\log(\frac{1-\tanh(0.6666a)}{2})

    .. note:: 0.6666 approximates the :math:`2/3` slope of LeCun's
       recommended :math:`1.7159\tanh(\frac{2}{3}a)`; the literal constant
       is kept unchanged for exact backward compatibility.

    :type output_act: Theano variable
    :param output_act: activation :math:`a` (pre-tanh, before the 0.6666 scale)
    :type target: Theano variable
    :param target: binary target, usually :math:`\in\{0,1\}`
    """
    return tanh_crossentropy(0.6666 * output_act, target)