comparison sparse_random_autoassociator/graph.py @ 381:e4473d9697d7
Added xent loss
author | Joseph Turian <turian@gmail.com>
---|---
date | Tue, 08 Jul 2008 01:57:33 -0400
parents | a1bbcde6b456
children | b4efd192d880
373:42cc94cf6c12 | 381:e4473d9697d7
---|---
29 # xnonzero sensitive loss: | 29 # xnonzero sensitive loss:
30 #nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN - xnonzero) | 30 #nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN - xnonzero)
31 #zeroloss = hingeloss(-t.max(-(ynonzero - xnonzero)) - yzero - MARGIN) | 31 #zeroloss = hingeloss(-t.max(-(ynonzero - xnonzero)) - yzero - MARGIN)
32 loss = t.sum(nonzeroloss) + t.sum(zeroloss) | 32 loss = t.sum(nonzeroloss) + t.sum(zeroloss)
33 | 33
 | 34 def binary_crossentropy(output, target):
 | 35     """
 | 36     Compute the crossentropy of binary output wrt binary target.
 | 37     @note: We do not sum, crossentropy is computed by component.
 | 38     @todo: Rewrite as a scalar, and then broadcast to tensor.
 | 39     """
 | 40     return -(target * t.log(output) + (1 - target) * t.log(1 - output))
 | 41 #loss = t.sum(binary_crossentropy(ynonzero, xnonzero)) + t.sum(binary_crossentropy(yzero, t.constant(0)))
 | 42
34 (gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = t.grad(loss, [w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero]) | 43 (gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = t.grad(loss, [w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero])
35 | 44
36 import theano.compile | 45 import theano.compile
37 | 46
38 inputs = [xnonzero, w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero] | 47 inputs = [xnonzero, w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero]
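The `binary_crossentropy` function introduced in 381:e4473d9697d7 returns one cross-entropy value per component, leaving any summation to the caller (as in the commented-out `loss` line). Below is a minimal sketch, not part of the changeset, of how the same expression could be compiled and evaluated on its own; the names `output` and `target` follow the diff, and everything else (the test vectors, the compiled function `xent`) is illustrative only.

```python
# Illustrative sketch only (not from changeset 381): evaluate the
# per-component binary cross-entropy added to graph.py, with `t` bound to
# theano.tensor as in the surrounding code.
import numpy
import theano
import theano.tensor as t

def binary_crossentropy(output, target):
    # Same expression as the function added in this changeset:
    # no summation, one cross-entropy value per component.
    return -(target * t.log(output) + (1 - target) * t.log(1 - output))

output = t.dvector('output')
target = t.dvector('target')
xent = theano.function([output, target], binary_crossentropy(output, target))

print(xent(numpy.array([0.9, 0.1]), numpy.array([1.0, 0.0])))
# Both components are -log(0.9), roughly 0.105, since each prediction
# puts probability 0.9 on the correct class.
```

Summing these component-wise values with `t.sum`, as the commented-out `loss` line does, gives the scalar that `t.grad` differentiates with respect to the weights and biases in the lines that follow the insertion.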