changeset 382:b4efd192d880

Moved xent loss to nnet_ops
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 01:58:16 -0400
parents e4473d9697d7
children 344d1f874af7
files nnet_ops.py sparse_random_autoassociator/graph.py
diffstat 2 files changed, 8 insertions(+), 7 deletions(-) [+]
line wrap: on
line diff
--- a/nnet_ops.py	Tue Jul 08 01:57:33 2008 -0400
+++ b/nnet_ops.py	Tue Jul 08 01:58:16 2008 -0400
@@ -379,3 +379,11 @@
 def crossentropy_softmax_1hot(x, y_idx, **kwargs):
     b = tensor.zeros_like(x[0,:])
     return crossentropy_softmax_1hot_with_bias(x, b, y_idx, **kwargs)
+
+def binary_crossentropy(output, target):
+    """
+    Compute the crossentropy of binary output wrt binary target.
+    @note: We do not sum, crossentropy is computed by component.
+    @todo: Rewrite as a scalar, and then broadcast to tensor.
+    """
+    return -(target * tensor.log(output) + (1 - target) * tensor.log(1 - output))
--- a/sparse_random_autoassociator/graph.py	Tue Jul 08 01:57:33 2008 -0400
+++ b/sparse_random_autoassociator/graph.py	Tue Jul 08 01:58:16 2008 -0400
@@ -31,13 +31,6 @@
 #zeroloss = hingeloss(-t.max(-(ynonzero - xnonzero)) - yzero - MARGIN)
 loss = t.sum(nonzeroloss) + t.sum(zeroloss)
 
-def binary_crossentropy(output, target):
-    """
-    Compute the crossentropy of binary output wrt binary target.
-    @note: We do not sum, crossentropy is computed by component.
-    @todo: Rewrite as a scalar, and then broadcast to tensor.
-    """
-    return -(target * t.log(output) + (1 - target) * t.log(1 - output))
 #loss = t.sum(binary_crossentropy(ynonzero, xnonzero)) + t.sum(binary_crossentropy(yzero, t.constant(0)))
 
 (gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = t.grad(loss, [w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero])