changeset 386:a474341861fa

Added a simple autoassociator (AA)
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 02:27:00 -0400
parents db28ff3fb887
children dace8b9743af efb797c5efc0
files simple_autoassociator.py/__init__.py simple_autoassociator.py/globals.py simple_autoassociator.py/graph.py simple_autoassociator.py/main.py simple_autoassociator.py/model.py simple_autoassociator.py/parameters.py sparse_random_autoassociator/__init__.py sparse_random_autoassociator/globals.py
diffstat 6 files changed, 141 insertions(+), 1 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/simple_autoassociator.py/globals.py	Tue Jul 08 02:27:00 2008 -0400
@@ -0,0 +1,10 @@
+"""
+Global variables.
+"""
+
+#INPUT_DIMENSION = 1000
+INPUT_DIMENSION = 10
+HIDDEN_DIMENSION = 20
+LEARNING_RATE = 0.1
+LR = LEARNING_RATE
+SEED = 666
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/simple_autoassociator.py/graph.py	Tue Jul 08 02:27:00 2008 -0400
@@ -0,0 +1,25 @@
+"""
+Theano graph for a simple autoassociator.
+@todo: Make nearly everything private.
+"""
+
+from pylearn.nnet_ops import sigmoid, binary_crossentropy
+from theano import tensor as t
+from theano.tensor import dot
+x           = t.dvector()
+w1          = t.dmatrix()
+b1          = t.dvector()
+w2          = t.dmatrix()
+b2          = t.dvector()
+h           = sigmoid(dot(x, w1) + b1)
+y           = sigmoid(dot(h, w2) + b2)
+
+loss = t.sum(binary_crossentropy(y, x))
+
+(gw1, gb1, gw2, gb2) = t.grad(loss, [w1, b1, w2, b2])
+
+import theano.compile
+
+inputs  = [x, w1, b1, w2, b2]
+outputs = [y, loss, gw1, gb1, gw2, gb2]
+trainfn = theano.compile.function(inputs, outputs)
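
For reference, the compiled trainfn takes numpy arrays in the order (x, w1, b1, w2, b2) and returns (y, loss, gw1, gb1, gw2, gb2). A minimal usage sketch, assuming "from graph import trainfn" and the shapes from globals.py (the zero-valued parameters here are purely illustrative):

    import numpy
    x  = numpy.zeros(10);        x[1] = 0.1            # dense input vector
    w1 = numpy.zeros((10, 20));  b1 = numpy.zeros(20)  # input -> hidden
    w2 = numpy.zeros((20, 10));  b2 = numpy.zeros(10)  # hidden -> reconstruction
    y, loss, gw1, gb1, gw2, gb2 = trainfn(x, w1, b1, w2, b2)
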
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/simple_autoassociator.py/main.py	Tue Jul 08 02:27:00 2008 -0400
@@ -0,0 +1,31 @@
+#!/usr/bin/python
+"""
+    A simple autoassociator.
+
+    The learned model is::
+       h   = sigmoid(dot(x, w1) + b1)
+       y   = sigmoid(dot(h, w2) + b2)
+
+    Binary cross-entropy (xent) reconstruction loss.
+
+    LIMITATIONS:
+       - Only does pure stochastic gradient (batchsize = 1).
+"""
+
+
+import numpy
+
+nonzero_instances = []
+nonzero_instances.append({1: 0.1, 5: 0.5, 9: 1})
+nonzero_instances.append({2: 0.3, 5: 0.5, 8: 0.8})
+nonzero_instances.append({1: 0.2, 2: 0.3, 5: 0.5})
+
+import model
+model = model.Model()
+
+for i in xrange(100000):
+    # Select an instance
+    instance = nonzero_instances[i % len(nonzero_instances)]
+
+    # SGD update over instance
+    model.update(instance)
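
The "binary xent" loss in the docstring is the binary cross-entropy between the reconstruction y and the input x, summed over all dimensions. A pure-numpy sketch of the forward pass and loss (illustrative only; the actual computation is the compiled Theano graph in graph.py):

    import numpy

    def sigmoid(a):
        return 1.0 / (1.0 + numpy.exp(-a))

    def forward_loss(x, w1, b1, w2, b2):
        h = sigmoid(numpy.dot(x, w1) + b1)    # hidden representation
        y = sigmoid(numpy.dot(h, w2) + b2)    # reconstruction of x
        # summed binary cross-entropy, matching t.sum(binary_crossentropy(y, x))
        loss = -numpy.sum(x * numpy.log(y) + (1 - x) * numpy.log(1 - y))
        return y, loss
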
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/simple_autoassociator.py/model.py	Tue Jul 08 02:27:00 2008 -0400
@@ -0,0 +1,46 @@
+"""
+The model for a simple autoassociator. Unlike sparse_random_autoassociator,
+it reconstructs the entire input and uses no sampling trick.
+"""
+
+from graph import trainfn
+import parameters
+
+import globals
+from globals import LR
+
+import numpy
+import random
+random.seed(globals.SEED)
+
+class Model:
+    def __init__(self):
+        self.parameters = parameters.Parameters(randomly_initialize=True)
+
+    def update(self, instance):
+        """
+        Update the L{Model} using one training instance.
+        @param instance: A dict from feature index to (non-zero) value.
+        @todo: Should assert that the values in the instance dict
+        are truly nonzero.
+        """
+        x = numpy.zeros(globals.INPUT_DIMENSION)
+        for idx in instance.keys():
+            x[idx] = instance[idx]
+
+        (y, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+        print
+        print "instance:", instance
+        print "OLD y:", y
+        print "OLD total loss:", loss
+
+        # SGD update
+        self.parameters.w1  -= LR * gw1
+        self.parameters.b1  -= LR * gb1
+        self.parameters.w2  -= LR * gw2
+        self.parameters.b2  -= LR * gb2
+
+        # Recompute the loss, to make sure it's decreasing
+        (y, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+        print "NEW y:", y
+        print "NEW total loss:", loss
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/simple_autoassociator.py/parameters.py	Tue Jul 08 02:27:00 2008 -0400
@@ -0,0 +1,28 @@
+"""
+Parameters (weights) used by the L{Model}.
+"""
+
+import numpy
+import globals
+
+class Parameters:
+    """
+    Parameters used by the L{Model}.
+    """
+    def __init__(self, input_dimension=globals.INPUT_DIMENSION, hidden_dimension=globals.HIDDEN_DIMENSION, randomly_initialize=False, seed=globals.SEED):
+        """
+        Initialize L{Model} parameters.
+        @param randomly_initialize: If True, then randomly initialize
+        according to the given seed. If False, then just use zeroes.
+        """
+        if randomly_initialize:
+            numpy.random.seed(seed)
+            self.w1 = (numpy.random.rand(input_dimension, hidden_dimension)-0.5)/input_dimension
+            self.w2 = (numpy.random.rand(hidden_dimension, input_dimension)-0.5)/hidden_dimension
+            self.b1 = numpy.zeros(hidden_dimension)
+            self.b2 = numpy.zeros(input_dimension)
+        else:
+            self.w1 = numpy.zeros((input_dimension, hidden_dimension))
+            self.w2 = numpy.zeros((hidden_dimension, input_dimension))
+            self.b1 = numpy.zeros(hidden_dimension)
+            self.b2 = numpy.zeros(input_dimension)
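
Each weight matrix is drawn uniformly from (-0.5, 0.5) and divided by its fan-in (its first dimension), so e.g. w1 entries lie in (-0.5/input_dimension, 0.5/input_dimension). An equivalent way to write the w1 draw (same distribution, though not the same seeded values):

    w1 = numpy.random.uniform(-0.5 / input_dimension, 0.5 / input_dimension,
                              (input_dimension, hidden_dimension))

In both branches the biases start at zero, which is conventional.
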
--- a/sparse_random_autoassociator/globals.py	Tue Jul 08 02:00:14 2008 -0400
+++ b/sparse_random_autoassociator/globals.py	Tue Jul 08 02:27:00 2008 -0400
@@ -3,7 +3,7 @@
 """
 
 INPUT_DIMENSION = 1000
-HIDDEN_DIMENSION = 100
+HIDDEN_DIMENSION = 20
 LEARNING_RATE = 0.1
 LR = LEARNING_RATE
 SEED = 666