changeset 417:4f61201fa9a9

Parameters are no longer global
author Joseph Turian <turian@iro.umontreal.ca>
date Fri, 11 Jul 2008 17:19:37 -0400
parents 8849eba55520
children 2ea14774eb07
files sandbox/rbm/model.py sandbox/simple_autoassociator/README.txt sandbox/simple_autoassociator/globals.py sandbox/simple_autoassociator/main.py sandbox/simple_autoassociator/model.py sandbox/simple_autoassociator/parameters.py
diffstat 6 files changed, 42 insertions(+), 39 deletions(-)
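
The headline change: hyperparameters move out of a module-level globals.py and into explicit constructor arguments, so configuration happens per model instance. A minimal sketch of the new call, using the argument names and defaults visible in the model.py diff below:

    import model
    m = model.Model(input_dimension=10, hidden_dimension=4,
                    learning_rate=0.1, weight_decay=0.0002, random_seed=666)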
--- a/sandbox/rbm/model.py	Fri Jul 11 16:34:46 2008 -0400
+++ b/sandbox/rbm/model.py	Fri Jul 11 17:19:37 2008 -0400
@@ -59,7 +59,7 @@
 
         random.seed(random_seed)
 
-        self.parameters = parameters.Parameters(input_dimension=self.input_dimension, hidden_dimension=self.hidden_dimension, randomly_initialize=False, random_seed=self.random_seed)
+        self.parameters = parameters.Parameters(input_dimension=self.input_dimension, hidden_dimension=self.hidden_dimension, randomly_initialize=True, random_seed=self.random_seed)
         self.prev_dw = 0
         self.prev_db = 0
         self.prev_dc = 0
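
An aside on why this flip matters (our gloss, not stated in the changeset): with zero-initialized weights every hidden unit computes the same activation and receives the same gradient, so the units never differentiate; random initialization breaks that symmetry. A minimal illustration:

    import numpy
    w = numpy.zeros((3, 2))                    # two hidden units with identical (zero) weights
    x = numpy.array([1.0, 0.0, 1.0])
    h = 1 / (1 + numpy.exp(-numpy.dot(x, w)))  # both units output 0.5 and stay identical under training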
--- a/sandbox/simple_autoassociator/README.txt	Fri Jul 11 16:34:46 2008 -0400
+++ b/sandbox/simple_autoassociator/README.txt	Fri Jul 11 17:19:37 2008 -0400
@@ -1,1 +1,5 @@
 This seems to work.
+
+@todo:
+    * Add momentum.
+    * Add learning rate decay schedule.
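
Both todos are standard SGD refinements. A hedged sketch of what the momentum update could look like (hypothetical, not part of this changeset; note that rbm/model.py above already reserves prev_dw/prev_db/prev_dc fields for exactly this):

    # Hypothetical momentum step, assuming a `momentum` hyperparameter:
    dw = momentum * prev_dw - learning_rate * gw1
    parameters.w1 += dw
    prev_dw = dw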
--- a/sandbox/simple_autoassociator/globals.py	Fri Jul 11 16:34:46 2008 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-"""
-Global variables.
-"""
-
-INPUT_DIMENSION = 1000
-#INPUT_DIMENSION = 100
-#INPUT_DIMENSION = 4
-HIDDEN_DIMENSION = 10
-#HIDDEN_DIMENSION = 1
-LEARNING_RATE = 0.1
-LR = LEARNING_RATE
-SEED = 666
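
With globals.py gone, dimensions, learning rate, and seed are no longer process-wide state. One practical payoff, sketched under the new constructor API, is that two differently-sized models can now coexist in one process (the dimensions below are the old global values and their commented-out alternatives):

    small = model.Model(input_dimension=4, hidden_dimension=1)
    large = model.Model(input_dimension=1000, hidden_dimension=10)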
--- a/sandbox/simple_autoassociator/main.py	Fri Jul 11 16:34:46 2008 -0400
+++ b/sandbox/simple_autoassociator/main.py	Fri Jul 11 17:19:37 2008 -0400
@@ -21,7 +21,7 @@
 ##nonzero_instances.append({1: 0.2, 2: 0.3, 5: 0.5})
 
 import model
-model = model.Model()
+model = model.Model(input_dimension=10, hidden_dimension=4)
 
 for i in xrange(100000):
 #    # Select an instance
--- a/sandbox/simple_autoassociator/model.py	Fri Jul 11 16:34:46 2008 -0400
+++ b/sandbox/simple_autoassociator/model.py	Fri Jul 11 17:19:37 2008 -0400
@@ -6,22 +6,30 @@
 from graph import trainfn
 import parameters
 
-import globals
-from globals import LR
-
 import numpy
 import random
-random.seed(globals.SEED)
 
 import pylearn.sparse_instance
 
 class Model:
-    def __init__(self):
-        self.parameters = parameters.Parameters(randomly_initialize=True)
+    """
+    @todo: Add momentum.
+    @todo: Add learning rate decay schedule.
+    """
+    def __init__(self, input_dimension, hidden_dimension, learning_rate = 0.1, weight_decay = 0.0002, random_seed = 666):
+        self.input_dimension    = input_dimension
+        self.hidden_dimension   = hidden_dimension
+        self.learning_rate      = learning_rate
+        self.weight_decay       = weight_decay
+        self.random_seed        = random_seed
 
-#    def deterministic_reconstruction(self, x):
-#        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
-#        return y
+        random.seed(random_seed)
+
+        self.parameters = parameters.Parameters(input_dimension=self.input_dimension, hidden_dimension=self.hidden_dimension, randomly_initialize=True, random_seed=self.random_seed)
+
+    def deterministic_reconstruction(self, x):
+        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+        return y
 
     def update(self, instances):
         """
@@ -29,10 +37,11 @@
         @param instances: A list of dicts, each mapping feature index to its (non-zero) value.
         @todo: Should assert that nonzero_indices and zero_indices
         are correct (i.e. are truly nonzero/zero).
+        @todo: Multiply L{self.weight_decay} by L{self.learning_rate}, as done in Semantic Hashing?
+        @todo: Decay the biases too?
         """
         minibatch = len(instances)
-#        x = pylearn.sparse_instance.to_vector(instances, self.input_dimension)
-        x = pylearn.sparse_instance.to_vector(instances, globals.INPUT_DIMENSION)
+        x = pylearn.sparse_instance.to_vector(instances, self.input_dimension)
 
         (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
 #        print
@@ -45,15 +54,18 @@
 #        print "gw2:", gw2
 #        print "gb2:", gb2
 
-        # SGD update
-        self.parameters.w1  -= LR * gw1
-        self.parameters.b1  -= LR * gb1
-        self.parameters.w2  -= LR * gw2
-        self.parameters.b2  -= LR * gb2
+        self.parameters.w1 *= (1 - self.weight_decay)
+        self.parameters.w2 *= (1 - self.weight_decay)
 
-        # Recompute the loss, to make sure it's decreasing
-        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
-#        print "NEW y:", y
-        print "NEW total loss:", loss
-#        print "h:", h
-#        print self.parameters
+        # SGD update
+        self.parameters.w1  -= self.learning_rate * gw1 / minibatch
+        self.parameters.b1  -= self.learning_rate * gb1 / minibatch
+        self.parameters.w2  -= self.learning_rate * gw2 / minibatch
+        self.parameters.b2  -= self.learning_rate * gb2 / minibatch
+
+#        # Recompute the loss, to make sure it's decreasing
+#        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+##        print "NEW y:", y
+#        print "NEW total loss:", loss
+##        print "h:", h
+##        print self.parameters
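
The rewritten update decouples weight decay from the gradient and averages the gradient over the minibatch: per weight matrix, w <- (1 - weight_decay) * w - (learning_rate / minibatch) * gw, with the biases skipping the decay. A self-contained sketch of that step (the function name is ours, not the repository's):

    import numpy

    def sgd_step(w, gw, learning_rate=0.1, weight_decay=0.0002, minibatch=1):
        # Decay first (decoupled from the loss gradient), then take a
        # minibatch-averaged gradient step, as model.py now does.
        w *= (1 - weight_decay)
        w -= learning_rate * gw / minibatch
        return w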
--- a/sandbox/simple_autoassociator/parameters.py	Fri Jul 11 16:34:46 2008 -0400
+++ b/sandbox/simple_autoassociator/parameters.py	Fri Jul 11 17:19:37 2008 -0400
@@ -3,20 +3,19 @@
 """
 
 import numpy
-import globals
 
 class Parameters:
     """
     Parameters used by the L{Model}.
     """
-    def __init__(self, input_dimension=globals.INPUT_DIMENSION, hidden_dimension=globals.HIDDEN_DIMENSION, randomly_initialize=False, seed=globals.SEED):
+    def __init__(self, input_dimension, hidden_dimension, randomly_initialize, random_seed):
         """
         Initialize L{Model} parameters.
         @param randomly_initialize: If True, then randomly initialize
         according to the given seed. If False, then just use zeroes.
         """
         if randomly_initialize:
-            numpy.random.seed(seed)
+            numpy.random.seed(random_seed)
             self.w1 = (numpy.random.rand(input_dimension, hidden_dimension)-0.5)/input_dimension
             self.w2 = (numpy.random.rand(hidden_dimension, input_dimension)-0.5)/hidden_dimension
             self.b1 = numpy.zeros(hidden_dimension)