changeset 388:98ca97cc9910

Debugging simple AA
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 17:41:26 -0400
parents dace8b9743af
children ec8aadb6694d
files simple_autoassociator.py/README.txt simple_autoassociator.py/globals.py simple_autoassociator.py/graph.py simple_autoassociator.py/main.py simple_autoassociator.py/model.py sparse_random_autoassociator/README.txt
diffstat 6 files changed, 21 insertions(+), 7 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/simple_autoassociator.py/README.txt	Tue Jul 08 17:41:26 2008 -0400
@@ -0,0 +1,1 @@
+This may be buggy. -jpt
--- a/simple_autoassociator.py/globals.py	Tue Jul 08 17:08:23 2008 -0400
+++ b/simple_autoassociator.py/globals.py	Tue Jul 08 17:41:26 2008 -0400
@@ -3,8 +3,10 @@
 """
 
 #INPUT_DIMENSION = 1000
+#INPUT_DIMENSION = 100
 INPUT_DIMENSION = 10
-HIDDEN_DIMENSION = 20
-LEARNING_RATE = 0.1
+#HIDDEN_DIMENSION = 20
+HIDDEN_DIMENSION = 4
+LEARNING_RATE = 0.01
 LR = LEARNING_RATE
 SEED = 666
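
These globals are consumed by the other modules in the package; for context, a hedged sketch of a typical use (the uniform init range below is an assumption for illustration, not taken from this changeset):

    import numpy as np
    from globals import INPUT_DIMENSION, HIDDEN_DIMENSION, SEED

    rng = np.random.RandomState(SEED)
    # e.g. a small random init for the 10 -> 4 -> 10 autoassociator
    w1 = rng.uniform(-0.1, 0.1, size=(INPUT_DIMENSION, HIDDEN_DIMENSION))
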
--- a/simple_autoassociator.py/graph.py	Tue Jul 08 17:08:23 2008 -0400
+++ b/simple_autoassociator.py/graph.py	Tue Jul 08 17:41:26 2008 -0400
@@ -14,12 +14,13 @@
 h           = sigmoid(dot(x, w1) + b1)
 y           = sigmoid(dot(h, w2) + b2)
 
-loss = t.sum(binary_crossentropy(y, x))
+loss_unsummed = binary_crossentropy(y, x)
+loss = t.sum(loss_unsummed)
 
 (gw1, gb1, gw2, gb2) = t.grad(loss, [w1, b1, w2, b2])
 
 import theano.compile
 
 inputs  = [x, w1, b1, w2, b2]
-outputs = [y, loss, gw1, gb1, gw2, gb2]
+outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2]
 trainfn = theano.compile.function(inputs, outputs)
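
For readers without a 2008-era Theano install, a minimal NumPy sketch of the same graph (the names x, w1, b1, w2, b2, h, y, loss_unsummed follow graph.py; trainfn_sketch is a hypothetical stand-in for the compiled Theano function and returns no gradients):

    import numpy as np

    def sigmoid(a):
        return 1.0 / (1.0 + np.exp(-a))

    def trainfn_sketch(x, w1, b1, w2, b2):
        h = sigmoid(np.dot(x, w1) + b1)    # hidden code
        y = sigmoid(np.dot(h, w2) + b2)    # reconstruction of x
        # Per-component binary cross-entropy, then the scalar sum,
        # mirroring loss_unsummed and loss above.
        loss_unsummed = -(x * np.log(y) + (1.0 - x) * np.log(1.0 - y))
        return y, h, loss_unsummed, loss_unsummed.sum()
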
--- a/simple_autoassociator.py/main.py	Tue Jul 08 17:08:23 2008 -0400
+++ b/simple_autoassociator.py/main.py	Tue Jul 08 17:41:26 2008 -0400
@@ -18,7 +18,7 @@
 nonzero_instances = []
 nonzero_instances.append({1: 0.1, 5: 0.5, 9: 1})
 nonzero_instances.append({2: 0.3, 5: 0.5, 8: 0.8})
-nonzero_instances.append({1: 0.2, 2: 0.3, 5: 0.5})
+#nonzero_instances.append({1: 0.2, 2: 0.3, 5: 0.5})
 
 import model
 model = model.Model()
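
The nonzero_instances entries are sparse dicts (index -> value); model.py densifies each one into a length-INPUT_DIMENSION vector before calling trainfn, as in this illustration of that step:

    import numpy as np
    INPUT_DIMENSION = 10                   # from globals.py
    instance = {1: 0.1, 5: 0.5, 9: 1}
    x = np.zeros(INPUT_DIMENSION)
    for idx, value in instance.items():
        x[idx] = value
    # x is now [0., 0.1, 0., 0., 0., 0.5, 0., 0., 0., 1.]
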
--- a/simple_autoassociator.py/model.py	Tue Jul 08 17:08:23 2008 -0400
+++ b/simple_autoassociator.py/model.py	Tue Jul 08 17:41:26 2008 -0400
@@ -28,11 +28,17 @@
         for idx in instance.keys():
             x[idx] = instance[idx]
 
-        (y, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
         print
         print "instance:", instance
+        print "x:", x
         print "OLD y:", y
+        print "NEW loss (unsummed):", loss_unsummed
         print "OLD total loss:", loss
+        print "gw1:", gw1
+        print "gb1:", gb1
+        print "gw2:", gw2
+        print "gb2:", gb2
 
         # SGD update
         self.parameters.w1  -= LR * gw1
@@ -41,6 +47,9 @@
         self.parameters.b2  -= LR * gb2
 
         # Recompute the loss, to make sure it's decreasing
-        (y, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
         print "NEW y:", y
+        print "NEW loss (unsummed):", loss_unsummed
         print "NEW total loss:", loss
+        print h
+        print self.parameters
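
Since both READMEs warn the code may be buggy, one standard debugging step is to compare the analytic gradients returned by trainfn against central finite differences. A minimal sketch under that assumption (check_grad and loss_fn are hypothetical names, not part of this changeset):

    import numpy as np

    def check_grad(loss_fn, analytic, param, eps=1e-5, tol=1e-4):
        # loss_fn(): recomputes the scalar loss from the current parameters;
        # analytic: the gradient trainfn returned for `param`.
        numeric = np.zeros_like(param)
        for i in np.ndindex(param.shape):
            orig = param[i]
            param[i] = orig + eps
            lp = loss_fn()
            param[i] = orig - eps
            lm = loss_fn()
            param[i] = orig                # restore the entry
            numeric[i] = (lp - lm) / (2.0 * eps)
        return np.max(np.abs(numeric - analytic)) <= tol
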
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sparse_random_autoassociator/README.txt	Tue Jul 08 17:41:26 2008 -0400
@@ -0,0 +1,1 @@
+This may be buggy. -jpt