changeset 411:faffaae0d2f9

Autoassociator now seems to work
author Joseph Turian <turian@iro.umontreal.ca>
date Fri, 11 Jul 2008 15:13:44 -0400
parents 3cd4cfda2599
children 35b858a1d4fd
files sandbox/simple_autoassociator/globals.py sandbox/simple_autoassociator/graph.py sandbox/simple_autoassociator/model.py sandbox/simple_autoassociator/parameters.py
diffstat 4 files changed, 19 insertions(+), 24 deletions(-)
--- a/sandbox/simple_autoassociator/globals.py	Fri Jul 11 01:33:27 2008 -0400
+++ b/sandbox/simple_autoassociator/globals.py	Fri Jul 11 15:13:44 2008 -0400
@@ -5,8 +5,8 @@
 #INPUT_DIMENSION = 1000
 #INPUT_DIMENSION = 100
 INPUT_DIMENSION = 4
-HIDDEN_DIMENSION = 10
-#HIDDEN_DIMENSION = 4
+#HIDDEN_DIMENSION = 10
+HIDDEN_DIMENSION = 1
 LEARNING_RATE = 0.1
 LR = LEARNING_RATE
 SEED = 666
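
With INPUT_DIMENSION = 4 and the new HIDDEN_DIMENSION = 1, the autoassociator is now a strict bottleneck: four inputs squeezed through a single hidden unit. A quick sketch of the parameter shapes this implies, following the constructors shown in parameters.py below:

    w1: (4, 1)   # input -> hidden
    b1: (1,)
    w2: (1, 4)   # hidden -> input
    b2: (4,)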
--- a/sandbox/simple_autoassociator/graph.py	Fri Jul 11 01:33:27 2008 -0400
+++ b/sandbox/simple_autoassociator/graph.py	Fri Jul 11 15:13:44 2008 -0400
@@ -17,10 +17,10 @@
 loss_unsummed = binary_crossentropy(y, x)
 loss = t.sum(loss_unsummed)
 
-(gw1, gb1, gw2, gb2, gy, gh) = t.grad(loss, [w1, b1, w2, b2, y, h])
+(gw1, gb1, gw2, gb2) = t.grad(loss, [w1, b1, w2, b2])
 
 import theano.compile
 
 inputs  = [x, w1, b1, w2, b2]
-outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy, gh]
+outputs = [y, h, loss, gw1, gb1, gw2, gb2]
 trainfn = theano.compile.function(inputs, outputs)
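
For context, here is a minimal sketch of the whole graph these hunks edit. Only inputs, h, y, the loss, and the gradient call appear in the diff; the variable declarations and the sigmoid activations are assumptions filled in around them:

    import theano
    import theano.compile
    import theano.tensor as t
    from theano.tensor.nnet import sigmoid, binary_crossentropy

    x  = t.dvector()    # one training instance
    w1 = t.dmatrix()    # (input_dimension, hidden_dimension)
    b1 = t.dvector()
    w2 = t.dmatrix()    # (hidden_dimension, input_dimension)
    b2 = t.dvector()

    h = sigmoid(t.dot(x, w1) + b1)    # hidden code
    y = sigmoid(t.dot(h, w2) + b2)    # reconstruction of x

    loss_unsummed = binary_crossentropy(y, x)
    loss = t.sum(loss_unsummed)

    # Gradients w.r.t. the parameters only; gy and gh are no longer taken.
    (gw1, gb1, gw2, gb2) = t.grad(loss, [w1, b1, w2, b2])

    inputs  = [x, w1, b1, w2, b2]
    outputs = [y, h, loss, gw1, gb1, gw2, gb2]
    trainfn = theano.compile.function(inputs, outputs)

Dropping gy and gh from t.grad also lets loss_unsummed fall out of the outputs, which is what shrinks the tuples unpacked in model.py below.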
--- a/sandbox/simple_autoassociator/model.py	Fri Jul 11 01:33:27 2008 -0400
+++ b/sandbox/simple_autoassociator/model.py	Fri Jul 11 15:13:44 2008 -0400
@@ -28,19 +28,16 @@
         for idx in instance.keys():
             x[idx] = instance[idx]
 
-        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy, gh) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
-        print
-        print "instance:", instance
-        print "x:", x
-        print "OLD y:", y
-        print "OLD loss (unsummed):", loss_unsummed
-        print "gy:", gy
-        print "gh:", gh
+        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+#        print
+#        print "instance:", instance
+#        print "x:", x
+#        print "OLD y:", y
         print "OLD total loss:", loss
-        print "gw1:", gw1
-        print "gb1:", gb1
-        print "gw2:", gw2
-        print "gb2:", gb2
+#        print "gw1:", gw1
+#        print "gb1:", gb1
+#        print "gw2:", gw2
+#        print "gb2:", gb2
 
         # SGD update
         self.parameters.w1  -= LR * gw1
@@ -49,10 +46,8 @@
         self.parameters.b2  -= LR * gb2
 
         # Recompute the loss, to make sure it's decreasing
-        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy, gh) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
-        print "NEW y:", y
-        print "NEW loss (unsummed):", loss_unsummed
-        print "gy:", gy
+        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+#        print "NEW y:", y
         print "NEW total loss:", loss
-        print "h:", h
-        print self.parameters
+#        print "h:", h
+#        print self.parameters
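
Stripped of the commented-out debugging prints, the training step reduces to: evaluate trainfn, take one SGD step, evaluate again to confirm the loss dropped. A standalone sketch of that logic (the surrounding class and the sparse-instance-to-dense-x conversion are assumed):

    LR = 0.1    # LEARNING_RATE from globals.py

    def sgd_step(parameters, x, trainfn):
        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(
                x, parameters.w1, parameters.b1, parameters.w2, parameters.b2)
        print "OLD total loss:", loss

        # One gradient-descent step on each parameter, in place.
        parameters.w1 -= LR * gw1
        parameters.b1 -= LR * gb1
        parameters.w2 -= LR * gw2
        parameters.b2 -= LR * gb2

        # Recompute the loss, to make sure it's decreasing.
        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(
                x, parameters.w1, parameters.b1, parameters.w2, parameters.b2)
        print "NEW total loss:", loss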
--- a/sandbox/simple_autoassociator/parameters.py	Fri Jul 11 01:33:27 2008 -0400
+++ b/sandbox/simple_autoassociator/parameters.py	Fri Jul 11 15:13:44 2008 -0400
@@ -20,8 +20,8 @@
             self.w1 = (numpy.random.rand(input_dimension, hidden_dimension)-0.5)/input_dimension
             self.w2 = (numpy.random.rand(hidden_dimension, input_dimension)-0.5)/hidden_dimension
             self.b1 = numpy.zeros(hidden_dimension)
-            #self.b2 = numpy.zeros(input_dimension)
-            self.b2 = numpy.array([10, 0, 0, -10])
+            self.b2 = numpy.zeros(input_dimension)
+            #self.b2 = numpy.array([10, 0, 0, -10])
         else:
             self.w1 = numpy.zeros((input_dimension, hidden_dimension))
             self.w2 = numpy.zeros((hidden_dimension, input_dimension))
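
For reference, the initialization strategy on its own: weights drawn uniformly from [-0.5, 0.5) and scaled by the layer's fan-in, biases starting at zero. A sketch using the dimensions from globals.py (seeding numpy here is an assumption; the diff does not show where SEED is applied):

    import numpy
    numpy.random.seed(666)                      # SEED from globals.py
    input_dimension, hidden_dimension = 4, 1    # per globals.py

    w1 = (numpy.random.rand(input_dimension, hidden_dimension) - 0.5) / input_dimension
    w2 = (numpy.random.rand(hidden_dimension, input_dimension) - 0.5) / hidden_dimension
    b1 = numpy.zeros(hidden_dimension)
    b2 = numpy.zeros(input_dimension)           # the hand-tuned b2 is now gone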