changeset 399:8796b91a9f09

RBM minibatch works
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 21:42:21 -0400
parents 6e55ccb7e2bf
children 269d5c5a4209
files sandbox/rbm/README.txt sandbox/rbm/globals.py sandbox/rbm/main.py sandbox/rbm/model.py sandbox/simple_autoassociator/README.txt sandbox/sparse_random_autoassociator/README.txt
diffstat 6 files changed, 33 insertions(+), 17 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sandbox/rbm/README.txt	Tue Jul 08 21:42:21 2008 -0400
@@ -0,0 +1,1 @@
+This seems to work fine.
--- a/sandbox/rbm/globals.py	Tue Jul 08 20:48:56 2008 -0400
+++ b/sandbox/rbm/globals.py	Tue Jul 08 21:42:21 2008 -0400
@@ -4,7 +4,8 @@
 
 INPUT_DIMENSION = 1000
 #INPUT_DIMENSION = 100
-HIDDEN_DIMENSION = 20
+HIDDEN_DIMENSION = 100
+#HIDDEN_DIMENSION = 20
 #HIDDEN_DIMENSION = 6
 LEARNING_RATE = 0.1
 LR = LEARNING_RATE
--- a/sandbox/rbm/main.py	Tue Jul 08 20:48:56 2008 -0400
+++ b/sandbox/rbm/main.py	Tue Jul 08 21:42:21 2008 -0400
@@ -1,9 +1,6 @@
 #!/usr/bin/python
 """
     An RBM with binomial units trained with CD-1.
-
-    LIMITATIONS:
-       - Only does pure stochastic gradient (batchsize = 1).
 """
 
 
--- a/sandbox/rbm/model.py	Tue Jul 08 20:48:56 2008 -0400
+++ b/sandbox/rbm/model.py	Tue Jul 08 21:42:21 2008 -0400
@@ -43,16 +43,18 @@
     def __init__(self):
         self.parameters = parameters.Parameters(randomly_initialize=True)
 
-    def update(self, instance):
+    def update(self, instances):
         """
         Update the L{Model} using one training instance.
         @param instance: A dict from feature index to (non-zero) value.
         @todo: Should assert that nonzero_indices and zero_indices
         are correct (i.e. are truly nonzero/zero).
         """
-        v0 = numpy.zeros((1, globals.INPUT_DIMENSION))
-        for idx in instance.keys():
-            v0[0][idx] = instance[idx]
+        v0 = numpy.zeros((len(instances), globals.INPUT_DIMENSION))
+        minibatch = len(instances)
+        for i in range(minibatch):
+            for idx in instances[i].keys():
+                v0[i][idx] = instances[i][idx]
 
         q0 = sigmoid(self.parameters.b + dot(v0, self.parameters.w))
         h0 = sample(q0)
@@ -60,15 +62,27 @@
         v1 = sample(p0)
         q1 = sigmoid(self.parameters.b + dot(v1, self.parameters.w))
         print
-        print "v[0]:", v0
-        print "Q(h[0][i] = 1 | v[0]):", q0
-        print "h[0]:", h0
-        print "P(v[1][j] = 1 | h[0]):", p0
+#        print "v[0]:", v0
+#        print "Q(h[0][i] = 1 | v[0]):", q0
+#        print "h[0]:", h0
+#        print "P(v[1][j] = 1 | h[0]):", p0
         print "XENT(P(v[1][j] = 1 | h[0]) | v0):", numpy.sum(crossentropy(p0, v0))
-        print "v[1]:", v1
-        print "Q(h[1][i] = 1 | v[1]):", q1
+#        print "v[1]:", v1
+#        print "Q(h[1][i] = 1 | v[1]):", q1
 
-        self.parameters.w += LR * (dot(v0.T, h0) - dot(v1.T, q1))
-        self.parameters.b += LR * (h0 - q1)
-        self.parameters.c += LR * (v0 - v1)
+#        print
+#        print v0.T.shape
+#        print h0.shape
+#        print dot(v0.T, h0).shape
+#        print self.parameters.w.shape
+        self.parameters.w += LR * (dot(v0.T, h0) - dot(v1.T, q1)) / minibatch
+#        print
+#        print h0.shape
+#        print q1.shape
+#        print self.parameters.b.shape
+        self.parameters.b += LR * numpy.sum(h0 - q1, axis=0) / minibatch
+#        print v0.shape, v1.shape
+#        print
+#        print self.parameters.c.shape
+        self.parameters.c += LR * numpy.sum(v0 - v1, axis=0) / minibatch
 #        print self.parameters
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sandbox/simple_autoassociator/README.txt	Tue Jul 08 21:42:21 2008 -0400
@@ -0,0 +1,2 @@
+This is broken. It can't even learn the simple two training instances in
+main.py.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sandbox/sparse_random_autoassociator/README.txt	Tue Jul 08 21:42:21 2008 -0400
@@ -0,0 +1,1 @@
+Since simple_aa doesn't work, this probably doesn't either.