comparison sparse_random_autoassociator/graph.py @ 370:a1bbcde6b456

Moved sparse_random_autoassociator from my repository
author Joseph Turian <turian@gmail.com>
date Mon, 07 Jul 2008 01:54:46 -0400
parents
children e4473d9697d7
comparison
equal deleted inserted replaced
369:90a29489b5c8 370:a1bbcde6b456
1 """
2 Theano graph for an autoassociator for sparse inputs, which will be trained
3 using Ronan Collobert + Jason Weston's sampling trick (2008).
4 @todo: Make nearly everything private.
5 """
6
7 from globals import MARGIN
8
9 from pylearn.nnet_ops import sigmoid, crossentropy_softmax_1hot
10 from theano import tensor as t
11 from theano.tensor import dot
# Symbolic inputs: the nonzero slice of a sparse example, plus the weight
# submatrices / bias vectors that touch it (per the module docstring, the
# zero outputs are subsampled a la Collobert & Weston's trick).
xnonzero = t.dvector()                            # nonzero entries of the input
w1nonzero, b1 = t.dmatrix(), t.dvector()          # encoder weights and bias
w2nonzero, b2nonzero = t.dmatrix(), t.dvector()   # decoder params for nonzero outputs
w2zero, b2zero = t.dmatrix(), t.dvector()         # decoder params for sampled zero outputs

# Forward pass: one sigmoid hidden layer, then two sigmoid output heads
# (one over the nonzero positions, one over the sampled zero positions).
h = sigmoid(dot(xnonzero, w1nonzero) + b1)
ynonzero = sigmoid(dot(h, w2nonzero) + b2nonzero)
yzero = sigmoid(dot(h, w2zero) + b2zero)
22
# May want to weight loss wrt nonzero value? e.g. MARGIN violation for
# 0.1 nonzero is not as bad as MARGIN violation for 0.2 nonzero.
def hingeloss(violation):
    """Elementwise hinge penalty: nonzero only where the margin is violated.

    Returns ``-violation`` where ``violation < 0`` and ``0`` elsewhere,
    so the result is positive exactly on margin violations.

    @param violation: margin slack (symbolic tensor, or any value
        supporting ``-``, ``*`` and ``<``); negative entries are
        constraint violations.
    @note: the parameter was formerly named ``MARGIN``, which shadowed
        the ``MARGIN`` constant imported from ``globals`` above — renamed
        to avoid that confusion. Callers in this file pass positionally,
        so behavior is unchanged.
    """
    return -violation * (violation < 0)
# Margin constraints: every nonzero output should exceed the strongest
# zero output by MARGIN, and every zero output should sit at least MARGIN
# below the weakest nonzero output; hinge-penalize each violation.
best_zero = t.max(yzero)               # strongest competing zero output
worst_nonzero = -t.max(-(ynonzero))    # i.e. the minimum nonzero output
nonzeroloss = hingeloss(ynonzero - best_zero - MARGIN)
zeroloss = hingeloss(worst_nonzero - yzero - MARGIN)
# xnonzero sensitive loss:
#nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN - xnonzero)
#zeroloss = hingeloss(-t.max(-(ynonzero - xnonzero)) - yzero - MARGIN)
loss = t.sum(nonzeroloss) + t.sum(zeroloss)
33
# Gradients of the margin loss w.r.t. every trainable parameter
# (the sparse input xnonzero itself is not trained).
params = [w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero]
(gw1nonzero, gb1, gw2nonzero, gw2zero,
 gb2nonzero, gb2zero) = t.grad(loss, params)

import theano.compile

# One compiled function returning the activations, the scalar loss,
# and all parameter gradients, so a caller can do a full SGD step.
inputs = [xnonzero] + params
outputs = [ynonzero, yzero, loss,
           gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero]
trainfn = theano.compile.function(inputs, outputs)