diff pylearn/sandbox/sparse_random_autoassociator/model.py @ 537:b054271b2504

new file structure layout, factories, etc.
author James Bergstra <bergstrj@iro.umontreal.ca>
date Wed, 12 Nov 2008 21:57:54 -0500
parents sandbox/sparse_random_autoassociator/model.py@36baeb7125a4
children
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylearn/sandbox/sparse_random_autoassociator/model.py	Wed Nov 12 21:57:54 2008 -0500
@@ -0,0 +1,76 @@
+"""
+The model for an autoassociator for sparse inputs, using Ronan Collobert and
+Jason Weston's sampling trick (2008).
+"""
+
+from graph import trainfn
+import parameters
+
+import globals
+from globals import LR
+
+import numpy
+import random
+random.seed(globals.SEED)
+
+def _select_indices(instance):
+    """
+    Choose nonzero and zero indices (feature columns) of the instance.
+    We select B{all} nonzero indices.
+    We select L{globals.ZERO_SAMPLE_SIZE} zero indices randomly,
+    without replacement.
+    @bug: If there are fewer than ZERO_SAMPLE_SIZE zeroes, we will enter
+    an endless loop.
+    @return: (nonzero_indices, zero_indices)
+    """
+    # Get the nonzero indices
+    nonzero_indices = instance.keys()
+    nonzero_indices.sort()
+
+    # Get the zero indices
+    # @bug: If there are fewer than ZERO_SAMPLE_SIZE zeroes, we will enter an endless loop.
+    zero_indices = []
+    while len(zero_indices) < globals.ZERO_SAMPLE_SIZE:
+        idx = random.randint(0, globals.INPUT_DIMENSION - 1)
+        if idx in nonzero_indices or idx in zero_indices: continue
+        zero_indices.append(idx)
+    zero_indices.sort()
+
+    return (nonzero_indices, zero_indices)
+
+class Model:
+    def __init__(self):
+        self.parameters = parameters.Parameters(randomly_initialize=True)
+
+    def update(self, instance):
+        """
+        Update the L{Model} using one training instance.
+        @param instance: A dict from feature index to (non-zero) value.
+        @todo: Should assert that nonzero_indices and zero_indices
+        are correct (i.e. are truly nonzero/zero).
+        """
+        (nonzero_indices, zero_indices) = _select_indices(instance)
+        # No update if there aren't any non-zeros.
+        if len(nonzero_indices) == 0: return
+        xnonzero = numpy.asarray([instance[idx] for idx in nonzero_indices])
+        print
+        print "xnonzero:", xnonzero
+
+        (ynonzero, yzero, loss, gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = trainfn(xnonzero, self.parameters.w1[nonzero_indices, :], self.parameters.b1, self.parameters.w2[:, nonzero_indices], self.parameters.w2[:, zero_indices], self.parameters.b2[nonzero_indices], self.parameters.b2[zero_indices])
+        print "OLD ynonzero:", ynonzero
+        print "OLD yzero:", yzero
+        print "OLD total loss:", loss
+
+        # SGD update
+        self.parameters.w1[nonzero_indices, :]  -= LR * gw1nonzero
+        self.parameters.b1                      -= LR * gb1
+        self.parameters.w2[:, nonzero_indices]  -= LR * gw2nonzero
+        self.parameters.w2[:, zero_indices]     -= LR * gw2zero
+        self.parameters.b2[nonzero_indices]     -= LR * gb2nonzero
+        self.parameters.b2[zero_indices]        -= LR * gb2zero
+
+        # Recompute the loss, to make sure it's decreasing
+        (ynonzero, yzero, loss, gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = trainfn(xnonzero, self.parameters.w1[nonzero_indices, :], self.parameters.b1, self.parameters.w2[:, nonzero_indices], self.parameters.w2[:, zero_indices], self.parameters.b2[nonzero_indices], self.parameters.b2[zero_indices])
+        print "NEW ynonzero:", ynonzero
+        print "NEW yzero:", yzero
+        print "NEW total loss:", loss