changeset 383:344d1f874af7

Small fix: use the full `tensor` module name in binary_crossentropy, and switch graph.py from the commented-out hinge loss to the binary cross-entropy loss
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 01:59:42 -0400
parents b4efd192d880
children edec18614a70
files nnet_ops.py sparse_random_autoassociator/graph.py
diffstat 2 files changed, 4 insertions(+), 4 deletions(-)
--- a/nnet_ops.py	Tue Jul 08 01:58:16 2008 -0400
+++ b/nnet_ops.py	Tue Jul 08 01:59:42 2008 -0400
@@ -386,4 +386,4 @@
     @note: We do not sum; crossentropy is computed per component.
     @todo: Rewrite as a scalar, and then broadcast to tensor.
     """
-    return -(target * t.log(output) + (1 - target) * t.log(1 - output))
+    return -(target * tensor.log(output) + (1 - target) * tensor.log(1 - output))
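
For context, the expression above is the componentwise binary cross-entropy described by the @note (no sum over elements). A minimal plain-numpy sketch of the same computation, assuming the function's signature is (output, target) to match the call sites in graph.py below; the sketch is illustrative and not part of the changeset:

import numpy as np

def binary_crossentropy_np(output, target):
    # Componentwise cross-entropy; no sum over elements, matching the @note.
    return -(target * np.log(output) + (1 - target) * np.log(1 - output))

output = np.array([0.9, 0.2, 0.6])   # predicted probabilities in (0, 1)
target = np.array([1.0, 0.0, 1.0])   # binary targets
print(binary_crossentropy_np(output, target))  # one loss value per component
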
--- a/sparse_random_autoassociator/graph.py	Tue Jul 08 01:58:16 2008 -0400
+++ b/sparse_random_autoassociator/graph.py	Tue Jul 08 01:59:42 2008 -0400
@@ -6,7 +6,7 @@
 
 from globals import MARGIN
 
-from pylearn.nnet_ops import sigmoid, crossentropy_softmax_1hot
+from pylearn.nnet_ops import sigmoid, binary_crossentropy
 from theano import tensor as t
 from theano.tensor import dot
 xnonzero    = t.dvector()
@@ -29,9 +29,9 @@
 # xnonzero sensitive loss:
 #nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN - xnonzero)
 #zeroloss = hingeloss(-t.max(-(ynonzero - xnonzero)) - yzero - MARGIN)
-loss = t.sum(nonzeroloss) + t.sum(zeroloss)
+#loss = t.sum(nonzeroloss) + t.sum(zeroloss)
 
-#loss = t.sum(binary_crossentropy(ynonzero, xnonzero)) + t.sum(binary_crossentropy(yzero, t.constant(0)))
+loss = t.sum(binary_crossentropy(ynonzero, xnonzero)) + t.sum(binary_crossentropy(yzero, t.constant(0)))
 
 (gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = t.grad(loss, [w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero])
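
To make the effect of the newly enabled loss concrete, here is a hedged numpy sketch of what t.sum(binary_crossentropy(...)) evaluates to; the arrays are made-up stand-ins for the symbolic ynonzero, yzero, and xnonzero variables above, and the (output, target) argument order is assumed from the call sites:

import numpy as np

def binary_crossentropy_np(output, target):
    return -(target * np.log(output) + (1 - target) * np.log(1 - output))

ynonzero = np.array([0.8, 0.7])    # sigmoid outputs at the nonzero input positions
yzero    = np.array([0.1, 0.05])   # sigmoid outputs at the sampled zero positions
xnonzero = np.array([1.0, 0.5])    # original nonzero input values, assumed in [0, 1]

# Same structure as the uncommented loss: pull ynonzero toward xnonzero
# and push yzero toward a constant target of 0.
loss = (binary_crossentropy_np(ynonzero, xnonzero).sum()
        + binary_crossentropy_np(yzero, np.zeros_like(yzero)).sum())
print(loss)  # the scalar that t.grad differentiates w.r.t. the weights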