changeset 533:de974b4fc4ea

Bugfix in pylearn.embeddings.length()
author Joseph Turian <turian@gmail.com>
date Tue, 18 Nov 2008 03:25:54 -0500
parents 34ee3aff3e8f
children eaa5ad4089a1
files algorithms/tests/test_daa.py embeddings/parameters.py embeddings/process.py
diffstat 3 files changed, 24 insertions(+), 16 deletions(-)
--- a/algorithms/tests/test_daa.py	Tue Nov 18 02:57:50 2008 -0500
+++ b/algorithms/tests/test_daa.py	Tue Nov 18 03:25:54 2008 -0500
@@ -28,7 +28,7 @@
             model.local_update[l]([[0, 1, 0, 1]])
             model.local_update[l]([[1, 0, 1, 0]])
 
-    for i in range(1):
+    for i in range(10):
         model.update([[0, 1, 0, 1]], [[1]])
         model.update([[1, 0, 1, 0]], [[0]])
     print model.classify([[0, 1, 0, 1]])
@@ -41,23 +41,31 @@
     daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'pred')],
                          regularize = False)
 
-    model = daa.make([4, 20, 20, 20, 10],
+    model = daa.make([4] + [20] * ndaa + [10],
                      lr = 0.01,
                      mode = mode,
                      seed = 10)
 
-    model.layers[0].noise_level = 0.3
-    model.layers[1].noise_level = 0.3
-    model.layers[2].noise_level = 0.3
+    for l in range(ndaa): model.layers[l].noise_level = 0.3
 
-    for l in range(3):
+    instances = [([[0, 1, 0, 1]], [1]), ([[1, 0, 1, 0]], [0])]
+
+    for l in range(ndaa):
         for i in range(10):
-            model.local_update[l]([[0, 1, 0, 1]])
-            model.local_update[l]([[1, 0, 1, 0]])
+            for (input, output) in instances:
+                model.local_update[l](input)
 
-    for i in range(1):
-        model.update([[0, 1, 0, 1]], [1])
-        model.update([[1, 0, 1, 0]], [0])
+    for i in range(10):
+        for (input, output) in instances:
+#            model.update(input, output)
+            print "OLD:", 
+            print model.validate(input, output)
+            oldloss = model.update(input, output)
+            print oldloss
+            print "NEW:"
+            print model.validate(input, output)
+            print 
+
     print model.apply([[0, 1, 0, 1]])
     print model.apply([[1, 0, 1, 0]])
 
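The refactor above derives the whole test from a single `ndaa` count: the layer widths are built as `[4] + [20] * ndaa + [10]` instead of being hard-coded, and the two training pairs live in one `instances` list that feeds both the layerwise pretraining loop and the supervised loop. A minimal, self-contained sketch of the idiom (plain Python; `fake_update` is a hypothetical stand-in for the Stacker model's update call, not part of pylearn):

    ndaa = 3
    # Input width 4, ndaa hidden layers of width 20, a 10-way output:
    # the same list the old code spelled out as [4, 20, 20, 20, 10].
    sizes = [4] + [20] * ndaa + [10]
    assert sizes == [4, 20, 20, 20, 10]

    # One list of (input, target) pairs replaces the duplicated literals.
    instances = [([[0, 1, 0, 1]], [1]),
                 ([[1, 0, 1, 0]], [0])]

    def fake_update(input, output):
        # Hypothetical stand-in for model.update(input, output).
        return 0.0

    for i in range(10):
        for (input, output) in instances:
            fake_update(input, output)

Growing the stack now only means changing `ndaa`, since the width list and every loop follow it.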
--- a/embeddings/parameters.py	Tue Nov 18 02:57:50 2008 -0500
+++ b/embeddings/parameters.py	Tue Nov 18 03:25:54 2008 -0500
@@ -1,10 +1,10 @@
 """
 Locations of the embedding data files.
 """
-WEIGHTSFILE     = "/home/fringant2/lisa/data/word_embeddings.collobert-and-weston/lm-weights.txt"
-VOCABFILE       = "/home/fringant2/lisa/data/word_embeddings.collobert-and-weston/words.asc"
-#WEIGHTSFILE     = "/home/joseph/data/word_embeddings.collobert-and-weston/lm-weights.txt"
-#VOCABFILE       = "/home/joseph/data/word_embeddings.collobert-and-weston/words.asc"
+#WEIGHTSFILE     = "/home/fringant2/lisa/data/word_embeddings.collobert-and-weston/lm-weights.txt"
+#VOCABFILE       = "/home/fringant2/lisa/data/word_embeddings.collobert-and-weston/words.asc"
+WEIGHTSFILE     = "/home/joseph/data/word_embeddings.collobert-and-weston/lm-weights.txt"
+VOCABFILE       = "/home/joseph/data/word_embeddings.collobert-and-weston/words.asc"
 NUMBER_OF_WORDS = 30000
 DIMENSIONS      = 50
 UNKNOWN         = "UNKNOWN"
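This hunk switches the active data paths from the lab machine to a home directory by commenting one pair out and the other in, which must be redone by hand on every machine change. A sketch of one common alternative, not part of this changeset: pick the data root from an environment variable, falling back to the lab path (`EMBEDDINGS_DATA_ROOT` is a hypothetical variable name):

    import os

    # Hypothetical scheme: take the data root from the environment when
    # set, otherwise fall back to the shared lab location.
    _DATA_ROOT = os.environ.get(
        "EMBEDDINGS_DATA_ROOT",
        "/home/fringant2/lisa/data/word_embeddings.collobert-and-weston")

    WEIGHTSFILE = os.path.join(_DATA_ROOT, "lm-weights.txt")
    VOCABFILE   = os.path.join(_DATA_ROOT, "words.asc")

With that, setting EMBEDDINGS_DATA_ROOT to /home/joseph/data/word_embeddings.collobert-and-weston reproduces the paths this hunk hard-codes, without editing the module.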
--- a/embeddings/process.py	Tue Nov 18 02:57:50 2008 -0500
+++ b/embeddings/process.py	Tue Nov 18 03:25:54 2008 -0500
@@ -15,7 +15,7 @@
     """
     @return: The length of embeddings
     """
-    len(__word_to_embedding[0])
+    return len(__word_to_embedding[__words[0]])
 
 def word_to_embedding(w):
     read_embeddings()
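The fix itself: `__word_to_embedding` maps a word string to its vector, so indexing it with the integer 0 was wrong; the dimensionality is the length of the embedding of a known word, here the first vocabulary entry `__words[0]`, and the result must be returned, per the docstring. A sketch of the repaired function in isolation, with the module globals passed as arguments for clarity (the real module keeps `__words` and `__word_to_embedding` as state filled in by `read_embeddings()`):

    def length(words, word_to_embedding):
        """
        @return: The length (dimensionality) of the embeddings.
        """
        # The mapping is keyed by word, so look up the first vocabulary
        # word and measure its vector.
        return len(word_to_embedding[words[0]])

    # e.g. length(["the", "of"], {"the": [0.1] * 50, "of": [0.2] * 50}) == 50

With DIMENSIONS = 50 in parameters.py, the call should return 50 for any word present in the vocabulary.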