cost.py @ 439:45879c1ecde7 (author Joseph Turian <turian@iro.umontreal.ca>, Tue, 19 Aug 2008)

"""
Cost functions.

@note: All of these functions return one cost per example. So it is your
job to perform a tensor.sum over the individual example losses.
"""

import theano.tensor as T

def quadratic(target, output, axis=1):
    """Mean squared error, averaged over `axis` (one cost per example)."""
    return T.mean(T.sqr(target - output), axis=axis)

def cross_entropy(target, output, axis=1):
    """Binary cross-entropy averaged over `axis`; `output` must lie strictly in (0, 1)."""
    return -T.mean(target * T.log(output) + (1 - target) * T.log(1 - output),
                   axis=axis)
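
# A minimal usage sketch, not part of the original module: it assumes the
# standard theano.function / T.dmatrix API and shows how the per-example
# losses returned above are reduced to a scalar cost with tensor.sum, as the
# module docstring asks the caller to do.
if __name__ == "__main__":
    import numpy
    import theano

    target = T.dmatrix('target')
    output = T.dmatrix('output')

    # cross_entropy returns one loss per row (example); T.sum collapses
    # them into a single scalar suitable for use as a training cost.
    total_cost = theano.function([target, output],
                                 T.sum(cross_entropy(target, output)))

    t = numpy.array([[0., 1.], [1., 0.]])
    o = numpy.array([[0.1, 0.9], [0.8, 0.2]])
    print(total_cost(t, o))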