Mercurial > pylearn
view pylearn/formulas/costs.py @ 1307:bc41fd23db25
Ported 4 cost formulas from the XG GitHub repository
author | boulanni <nicolas_boulanger@hotmail.com> |
---|---|
date | Mon, 04 Oct 2010 12:53:19 -0400 |
parents | 24890ca1d96b |
children | 63fe96ede21d |
line wrap: on
line source
""" Common training criteria. """ import theano import theano.tensor as T from tags import tags @tags('cost','binary','cross-entropy') def binary_crossentropy(output, target): """ Compute the crossentropy of binary output wrt binary target. .. math:: L_{CE} \equiv t\log(o) + (1-t)\log(1-o) :type output: Theano variable :param output: Binary output or prediction :math:`\in[0,1]` :type target: Theano variable :param target: Binary target usually :math:`\in\{0,1\}` """ return -(target * T.log(output) + (1.0 - target) * T.log(1.0 - output)) # This file seems like it has some overlap with theano.tensor.nnet. Which functions should go # in which file? @tags('cost','binary','cross-entropy', 'sigmoid') def sigmoid_crossentropy(output_act, target): """ Stable crossentropy of a sigmoid activation .. math:: L_{CE} \equiv t\log(\sigma(a)) + (1-t)\log(1-\sigma(a)) :type output_act: Theano variable :param output: Activation :type target: Theano variable :param target: Binary target usually :math:`\in\{0,1\}` """ return target * (- T.log(1.0 + T.exp(-output_act))) + (1.0 - target) * (- T.log(1.0 + T.exp(output_act))) @tags('cost','binary','cross-entropy', 'tanh') def tanh_crossentropy(output_act, target): """ Stable crossentropy of a tanh activation .. math:: L_{CE} \equiv t\log(\\frac{1+\\tanh(a)}2) + (1-t)\log(\\frac{1-\\tanh(a)}2) :type output_act: Theano variable :param output: Activation :type target: Theano variable :param target: Binary target usually :math:`\in\{0,1\}` """ return sigmoid_crossentropy(2.0*output_act, target) @tags('cost','binary','cross-entropy', 'tanh', 'abs') def abstanh_crossentropy(output_act, target): """ Stable crossentropy of a absolute value tanh activation .. 
math:: L_{CE} \equiv t\log(\\frac{1+\\tanh(|a|)}2) + (1-t)\log(\\frac{1-\\tanh(|a|)}2) :type output_act: Theano variable :param output: Activation :type target: Theano variable :param target: Binary target usually :math:`\in\{0,1\}` """ return tanh_crossentropy(T.abs_(output_act), target) @tags('cost','binary','cross-entropy', 'tanh', "normalized") def normtanh_crossentropy(output_act, target): """ Stable crossentropy of a "normalized" tanh activation (LeCun) .. math:: L_{CE} \equiv t\log(\\frac{1+\\tanh(0.6666a)}2) + (1-t)\log(\\frac{1-\\tanh(0.6666a)}2) :type output_act: Theano variable :param output: Activation :type target: Theano variable :param target: Binary target usually :math:`\in\{0,1\}` """ return tanh_crossentropy(0.6666*output_act, target)