diff simple_autoassociator/graph.py @ 392:e2cb8d489908

More debugging
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 18:45:35 -0400
parents ec8aadb6694d
children
--- a/simple_autoassociator/graph.py	Tue Jul 08 17:41:45 2008 -0400
+++ b/simple_autoassociator/graph.py	Tue Jul 08 18:45:35 2008 -0400
@@ -17,10 +17,10 @@
 loss_unsummed = binary_crossentropy(y, x)
 loss = t.sum(loss_unsummed)
 
-(gw1, gb1, gw2, gb2) = t.grad(loss, [w1, b1, w2, b2])
+(gw1, gb1, gw2, gb2, gy) = t.grad(loss, [w1, b1, w2, b2, y])
 
 import theano.compile
 
 inputs  = [x, w1, b1, w2, b2]
-outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2]
+outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy]
 trainfn = theano.compile.function(inputs, outputs)
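For context, below is a minimal sketch of the full graph this hunk modifies. The definitions of x, w1, b1, w2, b2, h, and y are not shown in the hunk, so the sigmoid activations and the single-hidden-layer layout here are assumptions; only the loss, gradient, and function-compilation lines mirror the patch. The change itself simply asks Theano for the gradient of the loss with respect to the intermediate reconstruction y as well, and returns it from the compiled function so it can be inspected while debugging.

import theano
import theano.tensor as t
from theano.tensor.nnet import sigmoid, binary_crossentropy

# Symbolic inputs and parameters (names follow the diff; shapes are per-example).
x  = t.dvector('x')    # input vector to reconstruct
w1 = t.dmatrix('w1')   # encoder weights
b1 = t.dvector('b1')   # encoder bias
w2 = t.dmatrix('w2')   # decoder weights
b2 = t.dvector('b2')   # decoder bias

# Assumed forward pass: encode to hidden code h, decode to reconstruction y.
h = sigmoid(t.dot(x, w1) + b1)
y = sigmoid(t.dot(h, w2) + b2)

# Reconstruction loss, as in the file being patched.
loss_unsummed = binary_crossentropy(y, x)
loss = t.sum(loss_unsummed)

# The patched line: also take the gradient w.r.t. the intermediate y,
# so it can be returned alongside the parameter gradients for debugging.
(gw1, gb1, gw2, gb2, gy) = t.grad(loss, [w1, b1, w2, b2, y])

inputs  = [x, w1, b1, w2, b2]
outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy]
trainfn = theano.function(inputs, outputs)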