pylearn: comparison of sparse_random_autoassociator/graph.py @ 383:344d1f874af7
Small fix
author    Joseph Turian <turian@gmail.com>
date      Tue, 08 Jul 2008 01:59:42 -0400
parents   b4efd192d880
children  edec18614a70
comparing 382:b4efd192d880 with 383:344d1f874af7

--- a/sparse_random_autoassociator/graph.py	(382:b4efd192d880)
+++ b/sparse_random_autoassociator/graph.py	(383:344d1f874af7)
@@ -4,11 +4,11 @@
 @todo: Make nearly everything private.
 """
 
 from globals import MARGIN
 
-from pylearn.nnet_ops import sigmoid, crossentropy_softmax_1hot
+from pylearn.nnet_ops import sigmoid, binary_crossentropy
 from theano import tensor as t
 from theano.tensor import dot
 xnonzero = t.dvector()
 w1nonzero = t.dmatrix()
 b1 = t.dvector()
@@ -27,13 +27,13 @@
 nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN)
 zeroloss = hingeloss(-t.max(-(ynonzero)) - yzero - MARGIN)
 # xnonzero sensitive loss:
 #nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN - xnonzero)
 #zeroloss = hingeloss(-t.max(-(ynonzero - xnonzero)) - yzero - MARGIN)
-loss = t.sum(nonzeroloss) + t.sum(zeroloss)
+#loss = t.sum(nonzeroloss) + t.sum(zeroloss)
 
-#loss = t.sum(binary_crossentropy(ynonzero, xnonzero)) + t.sum(binary_crossentropy(yzero, t.constant(0)))
+loss = t.sum(binary_crossentropy(ynonzero, xnonzero)) + t.sum(binary_crossentropy(yzero, t.constant(0)))
 
 (gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = t.grad(loss, [w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero])
 
 import theano.compile
 
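For context, this changeset swaps the margin-based hinge objective for a binary cross-entropy reconstruction objective. The following standalone sketch is not part of the repository: it uses plain numpy, assumes hingeloss(err) behaves like max(0, -err) (its real definition falls outside the lines shown here), and assumes binary_crossentropy has the usual elementwise semantics -(t*log(o) + (1-t)*log(1-o)), as in pylearn.nnet_ops. It only illustrates what the two losses compute on toy values.

import numpy as np

# Assumed hinge: penalize only margin violations (err < 0); the actual
# hingeloss definition lies outside the hunks shown in this comparison.
def hingeloss(err):
    return np.maximum(0.0, -err)

# Assumed elementwise semantics of pylearn.nnet_ops.binary_crossentropy.
def binary_crossentropy(output, target):
    return -(target * np.log(output) + (1.0 - target) * np.log(1.0 - output))

MARGIN = 0.25                      # stand-in for globals.MARGIN
ynonzero = np.array([0.8, 0.6])    # toy outputs at nonzero input positions
yzero = np.array([0.1, 0.2])       # toy outputs at zero input positions
xnonzero = np.array([1.0, 1.0])    # toy targets for the nonzero positions

# Old objective (commented out by this changeset): every nonzero output
# should exceed every zero output by at least MARGIN.
nonzeroloss = hingeloss(ynonzero - yzero.max() - MARGIN)
zeroloss = hingeloss(-(-ynonzero).max() - yzero - MARGIN)
hinge_total = nonzeroloss.sum() + zeroloss.sum()

# New objective enabled by this changeset: cross-entropy of nonzero outputs
# against their targets, plus cross-entropy of zero outputs against 0.
bce_total = (binary_crossentropy(ynonzero, xnonzero).sum()
             + binary_crossentropy(yzero, np.zeros_like(yzero)).sum())

print(hinge_total, bce_total)

A design note, as a guess at the motivation: the hinge objective goes flat once every margin is satisfied, while cross-entropy keeps a nonzero gradient pushing each output toward its target, so t.grad on the new loss provides a training signal everywhere the sigmoid outputs stay in (0, 1).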