diff sparse_random_autoassociator/main.py @ 372:75bab24bb2d8

Moved more logic into model.py
author Joseph Turian <turian@gmail.com>
date Mon, 07 Jul 2008 02:06:15 -0400
parents 22463a194c90
children e4473d9697d7
--- a/sparse_random_autoassociator/main.py	Mon Jul 07 01:57:49 2008 -0400
+++ b/sparse_random_autoassociator/main.py	Mon Jul 07 02:06:15 2008 -0400
@@ -25,15 +25,10 @@
        - Loss is irrespective of the xnonzero magnitude.
        - We will always use all nonzero entries, even if the training
        instance is very non-sparse.
-       
-    @bug: If there are not ZERO_SAMPLE_SIZE zeroes, we will enter an
-    endless loop.
 """
 
 
-import numpy, random
-import globals
-random.seed(globals.SEED)
+import numpy
 
 nonzero_instances = []
 nonzero_instances.append({1: 0.1, 5: 0.5, 9: 1})
@@ -47,18 +42,5 @@
     # Select an instance
     instance = nonzero_instances[i % len(nonzero_instances)]
 
-    # Get the nonzero indices
-    nonzero_indexes = instance.keys()
-    nonzero_indexes.sort()
-
-    # Get the zero indices
-    # @bug: If there are not ZERO_SAMPLE_SIZE zeroes, we will enter an endless loop.
-    zero_indexes = []
-    while len(zero_indexes) < globals.ZERO_SAMPLE_SIZE:
-        idx = random.randint(0, globals.INPUT_DIMENSION - 1)
-        if idx in nonzero_indexes or idx in zero_indexes: continue
-        zero_indexes.append(idx)
-    zero_indexes.sort()
-
     # SGD update over instance
-    model.update(instance, nonzero_indexes, zero_indexes)
+    model.update(instance)
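
Note: this changeset moves the nonzero/zero index selection out of main.py (presumably into model.update), and with it the "@bug" note about an endless loop when fewer than ZERO_SAMPLE_SIZE zero entries exist. The diff does not show the new model.py code. As a minimal sketch only, the removed sampling loop could be written without the endless-loop risk by enumerating the zero positions once and sampling from them; the function name sample_zero_indexes and the parameters input_dimension / zero_sample_size below are illustrative stand-ins for globals.INPUT_DIMENSION and globals.ZERO_SAMPLE_SIZE, not code from this repository (the original is Python 2; the sketch uses modern Python).

    import random

    def sample_zero_indexes(instance, input_dimension, zero_sample_size, seed=None):
        """Return (nonzero_indexes, zero_indexes) for a sparse instance.

        `instance` maps nonzero index -> value, as in main.py.
        Hypothetical helper; not the repository's model.py implementation.
        """
        rng = random.Random(seed)
        nonzero_indexes = sorted(instance.keys())
        # Enumerate every index whose value is zero, up front.
        candidates = [i for i in range(input_dimension) if i not in instance]
        # Take at most as many zeros as actually exist, so a nearly dense
        # instance cannot cause an endless retry loop.
        k = min(zero_sample_size, len(candidates))
        zero_indexes = sorted(rng.sample(candidates, k))
        return nonzero_indexes, zero_indexes

For example, sample_zero_indexes({1: 0.1, 5: 0.5, 9: 1}, input_dimension=20, zero_sample_size=4) returns the sorted nonzero indices [1, 5, 9] and four distinct zero indices, and simply returns fewer zeros if fewer are available.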