pylearn: annotate sandbox/simple_autoassociator/graph.py @ 494:02a331ba833b
merge

| author | Joseph Turian <turian@gmail.com> |
|---|---|
| date | Tue, 28 Oct 2008 11:40:56 -0400 |
| parents | 8849eba55520 |
| children | |

"""
Theano graph for a simple autoassociator.
@todo: Make nearly everything private.
"""

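# The graph below defines a one-hidden-layer autoassociator (autoencoder):
# a minibatch of inputs is mapped to a hidden code and back to a
# reconstruction, and the reconstruction error is the training loss.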
from pylearn.nnet_ops import sigmoid, binary_crossentropy
from theano import tensor as t
from theano.tensor import dot
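
# Symbolic minibatch input x (one example per row) and the autoassociator
# parameters: w1, b1 map the input to the hidden layer, w2, b2 map the
# hidden layer back to a reconstruction of the input.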
x = t.dmatrix()
w1 = t.dmatrix()
b1 = t.dvector()
w2 = t.dmatrix()
b2 = t.dvector()
h = sigmoid(dot(x, w1) + b1)
y = sigmoid(dot(h, w2) + b2)

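# Training criterion: binary cross-entropy between the reconstruction y and
# the input x, summed over all units and all examples in the minibatch.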
loss_unsummed = binary_crossentropy(y, x)
loss = t.sum(loss_unsummed)

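# Symbolic gradients of the summed loss with respect to the four parameters.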
(gw1, gb1, gw2, gb2) = t.grad(loss, [w1, b1, w2, b2])

import theano.compile

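# Compile a single Theano function that, given a minibatch and the current
# parameter values, returns the reconstruction, the hidden code, the loss,
# and all four gradients in one call.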
inputs = [x, w1, b1, w2, b2]
outputs = [y, h, loss, gw1, gb1, gw2, gb2]
trainfn = theano.compile.function(inputs, outputs)
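
The file never calls trainfn itself. Below is a minimal usage sketch, not part of the repository file: it assumes NumPy is available, that the compiled function takes the five inputs positionally as arrays and returns the seven outputs in order, and it uses illustrative sizes (4 examples, 10 visible units, 5 hidden units) and hypothetical names (x_val, w1_val, learning_rate).

import numpy

# Illustrative sizes: 4 examples per minibatch, 10 visible units, 5 hidden units.
x_val  = numpy.random.rand(4, 10)
w1_val = numpy.random.uniform(-0.1, 0.1, size=(10, 5))
b1_val = numpy.zeros(5)
w2_val = numpy.random.uniform(-0.1, 0.1, size=(5, 10))
b2_val = numpy.zeros(10)

learning_rate = 0.01
for step in range(100):
    # One pass over the minibatch: reconstruction, hidden code, loss, gradients.
    y_val, h_val, loss_val, gw1, gb1, gw2, gb2 = trainfn(x_val, w1_val, b1_val, w2_val, b2_val)
    # Plain gradient descent on each parameter.
    w1_val -= learning_rate * gw1
    b1_val -= learning_rate * gb1
    w2_val -= learning_rate * gw2
    b2_val -= learning_rate * gb2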