changeset 907:6d4f98f86514

fix import and fix method name change. This uncovered other changes not reflected in the tests.
author Frederic Bastien <nouiz@nouiz.org>
date Thu, 18 Mar 2010 11:18:20 -0400
parents d1a757d17e19
children 8e3f1d852ab1
files pylearn/shared/layers/kording2004.py pylearn/shared/layers/tests/test_kouh2008.py
diffstat 2 files changed, 5 insertions(+), 6 deletions(-) [+]
line wrap: on
line diff
--- a/pylearn/shared/layers/kording2004.py	Thu Mar 18 10:49:09 2010 -0400
+++ b/pylearn/shared/layers/kording2004.py	Thu Mar 18 11:18:20 2010 -0400
@@ -1,7 +1,6 @@
 import numpy
 import theano.tensor
-from hpu.theano_outgoing import mean, var, cov
-
+from theano.tensor.basic import mean
 from pylearn.shared.layers.exponential_mean import ExponentialMean # exponential_mean.py
 
 import logging
--- a/pylearn/shared/layers/tests/test_kouh2008.py	Thu Mar 18 10:49:09 2010 -0400
+++ b/pylearn/shared/layers/tests/test_kouh2008.py	Thu Mar 18 11:18:20 2010 -0400
@@ -9,9 +9,9 @@
     n_out = 10
     n_terms = 3
     rng = numpy.random.RandomState(23455)
-    layer = Kouh2008.new_filters(rng, tensor.dmatrix(), n_in, n_out, n_terms, dtype='float64')
+    layer = Kouh2008.new_filters_expbounds(rng, tensor.dmatrix(), n_in, n_out, n_terms, dtype='float64')
     assert layer.output.dtype =='float64'
-    layer = Kouh2008.new_filters(rng, tensor.fmatrix(), n_in, n_out, n_terms, dtype='float32')
+    layer = Kouh2008.new_filters_expbounds(rng, tensor.fmatrix(), n_in, n_out, n_terms, dtype='float32')
     assert layer.output.dtype =='float32'
 
 def run_w_random(bsize=10, n_iter=200, n_in = 1024, n_out = 100, n_terms=2, dtype='float64'):
@@ -19,7 +19,7 @@
     y = tensor.lvector()
     rng = numpy.random.RandomState(23455)
 
-    layer = Kouh2008.new_filters(rng, x, n_in, n_out, n_terms, dtype='float64')
+    layer = Kouh2008.new_filters_expbounds(rng, x, n_in, n_out, n_terms, dtype='float64')
     out = LogisticRegression.new(layer.output, n_out, 2)
     cost = out.nll(y).sum()
 
@@ -52,7 +52,7 @@
     y = tensor.lvector()
     rng = numpy.random.RandomState(23455)
 
-    layer = Kouh2008.new_filters(rng, x, n_in, n_out, n_terms, dtype='float64')
+    layer = Kouh2008.new_filters_expbounds(rng, x, n_in, n_out, n_terms, dtype='float64')
     out = LogisticRegression.new(layer.output, n_out, 2)
     cost = out.nll(y).sum()
     #joint optimization except for one of the linear filters