# HG changeset patch
# User Joseph Turian
# Date 1215496696 14400
# Node ID b4efd192d880d40dd73f3787dcd2b8f46666cef1
# Parent  e4473d9697d768221faf7e5239aa7d49c4fd65c5
Moved xent loss to nnet_ops

diff -r e4473d9697d7 -r b4efd192d880 nnet_ops.py
--- a/nnet_ops.py	Tue Jul 08 01:57:33 2008 -0400
+++ b/nnet_ops.py	Tue Jul 08 01:58:16 2008 -0400
@@ -379,3 +379,11 @@
 def crossentropy_softmax_1hot(x, y_idx, **kwargs):
     b = tensor.zeros_like(x[0,:])
     return crossentropy_softmax_1hot_with_bias(x, b, y_idx, **kwargs)
+
+def binary_crossentropy(output, target):
+    """
+    Compute the crossentropy of binary output wrt binary target.
+    @note: We do not sum, crossentropy is computed by component.
+    @todo: Rewrite as a scalar, and then broadcast to tensor.
+    """
+    return -(target * tensor.log(output) + (1 - target) * tensor.log(1 - output))
diff -r e4473d9697d7 -r b4efd192d880 sparse_random_autoassociator/graph.py
--- a/sparse_random_autoassociator/graph.py	Tue Jul 08 01:57:33 2008 -0400
+++ b/sparse_random_autoassociator/graph.py	Tue Jul 08 01:58:16 2008 -0400
@@ -31,13 +31,6 @@
 #zeroloss = hingeloss(-t.max(-(ynonzero - xnonzero)) - yzero - MARGIN)
 loss = t.sum(nonzeroloss) + t.sum(zeroloss)
 
-def binary_crossentropy(output, target):
-    """
-    Compute the crossentropy of binary output wrt binary target.
-    @note: We do not sum, crossentropy is computed by component.
-    @todo: Rewrite as a scalar, and then broadcast to tensor.
-    """
-    return -(target * t.log(output) + (1 - target) * t.log(1 - output))
 #loss = t.sum(binary_crossentropy(ynonzero, xnonzero)) + t.sum(binary_crossentropy(yzero, t.constant(0)))
 (gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = t.grad(loss, [w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero])