changeset 893:912be602c3ac
fix imports for the move of shared variables outside of the sandbox.
author    Frederic Bastien <nouiz@nouiz.org>
date      Wed, 27 Jan 2010 09:56:37 -0500
parents   a13142cbeabd
children  032911ac4941
files     pylearn/dataset_ops/gldataset.py
          pylearn/dataset_ops/tests/test_cifar10.py
          pylearn/shared/layers/exponential_mean.py
          pylearn/shared/layers/kouh2008.py
          pylearn/shared/layers/lecun1998.py
          pylearn/shared/layers/logreg.py
          pylearn/shared/layers/rust2005.py
          pylearn/shared/layers/sandbox/linsvm.py
          pylearn/shared/layers/sgd.py
          pylearn/shared/layers/sigmoidal_layer.py
          pylearn/shared/layers/tests/test_kouh2008.py
          pylearn/shared/layers/tests/test_sigmoidal_layer.py
diffstat  12 files changed, 13 insertions(+), 13 deletions(-)
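A minimal sketch of what the change means for user code (not part of the changeset; the variable names and the toy computation are illustrative only): with a Theano checkout from this period, shared and pfunc are imported from theano.compile rather than theano.compile.sandbox.

    import numpy
    import theano
    from theano import tensor
    from theano.compile import shared, pfunc  # was: from theano.compile.sandbox import shared, pfunc

    # a shared variable holding model state, and a compiled function that reads it
    w = shared(numpy.zeros(3, dtype='float64'))
    x = tensor.dvector('x')
    f = pfunc([x], tensor.dot(w, x))
    print f(numpy.ones(3))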
--- a/pylearn/dataset_ops/gldataset.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/dataset_ops/gldataset.py	Wed Jan 27 09:56:37 2010 -0500
@@ -17,8 +17,8 @@
 import numpy
 import theano
-from theano.compile.sandbox import shared
-from theano.compile.sandbox import pfunc as function
+from theano.compile import shared
+from theano.compile import pfunc as function

 _logger = logging.getLogger('gldataset')
 def debug(*msg): _logger.debug(' '.join(str(m) for m in msg))
--- a/pylearn/dataset_ops/tests/test_cifar10.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/dataset_ops/tests/test_cifar10.py	Wed Jan 27 09:56:37 2010 -0500
@@ -1,7 +1,7 @@
 import unittest
 import numpy
 import theano
-from theano.compile.sandbox import pfunc, shared
+from theano.compile import pfunc, shared
 from theano import tensor
 from pylearn.dataset_ops.cifar10 import cifar10, forget
--- a/pylearn/shared/layers/exponential_mean.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/exponential_mean.py	Wed Jan 27 09:56:37 2010 -0500
@@ -6,7 +6,7 @@
 import numpy
 import theano
 import theano.tensor
-from theano.compile.sandbox import shared
+from theano.compile import shared

 class ExponentialMean(object):
     """Maintain an exponentially-decaying estimate of the mean
--- a/pylearn/shared/layers/kouh2008.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/kouh2008.py	Wed Jan 27 09:56:37 2010 -0500
@@ -20,7 +20,7 @@
 from theano import tensor
 from theano.tensor.nnet import softplus
 from theano.sandbox.softsign import softsign
-from theano.compile.sandbox import shared
+from theano.compile import shared
 from pylearn.shared.layers.util import add_logging, update_locals

 try:
--- a/pylearn/shared/layers/lecun1998.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/lecun1998.py	Wed Jan 27 09:56:37 2010 -0500
@@ -6,7 +6,7 @@
 import theano
 from theano import tensor
-from theano.compile.sandbox import shared, pfunc
+from theano.compile import shared, pfunc
 from theano.sandbox.conv import ConvOp
 from theano.sandbox.downsample import DownsampleFactorMax
--- a/pylearn/shared/layers/logreg.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/logreg.py	Wed Jan 27 09:56:37 2010 -0500
@@ -2,7 +2,7 @@
 """
 import numpy
 import theano
-from theano.compile.sandbox import shared
+from theano.compile import shared
 from theano.tensor import nnet
 from pylearn.shared.layers.util import update_locals, add_logging
--- a/pylearn/shared/layers/rust2005.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/rust2005.py	Wed Jan 27 09:56:37 2010 -0500
@@ -25,7 +25,7 @@
 import theano
 import theano.tensor
 import theano.tensor.nnet
-from theano.compile.sandbox import shared
+from theano.compile import shared
 from theano.sandbox.softsign import softsign
 from theano.tensor.nnet import softplus
 from theano.sandbox.conv import ConvOp
--- a/pylearn/shared/layers/sandbox/linsvm.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/sandbox/linsvm.py	Wed Jan 27 09:56:37 2010 -0500
@@ -1,6 +1,6 @@
 import numpy
 import theano
-from theano.compile.sandbox import shared
+from theano.compile import shared
 from theano.tensor import nnet
 from .util import update_locals
--- a/pylearn/shared/layers/sgd.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/sgd.py	Wed Jan 27 09:56:37 2010 -0500
@@ -4,7 +4,7 @@
 import numpy
 import theano
 from theano import tensor
-from theano.compile.sandbox import shared
+from theano.compile import shared

 class StochasticGradientDescent(object):
     """Fixed stepsize gradient descent
--- a/pylearn/shared/layers/sigmoidal_layer.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/sigmoidal_layer.py	Wed Jan 27 09:56:37 2010 -0500
@@ -6,7 +6,7 @@
 import theano
 from theano import tensor
-from theano.compile.sandbox import shared, pfunc
+from theano.compile import shared, pfunc
 from pylearn.shared.layers.util import update_locals, add_logging
 from pylearn.shared.layers.squash import squash
--- a/pylearn/shared/layers/tests/test_kouh2008.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/tests/test_kouh2008.py	Wed Jan 27 09:56:37 2010 -0500
@@ -1,7 +1,7 @@
 import numpy
 import theano.compile.debugmode
 from theano import tensor
-from theano.compile.sandbox import pfunc
+from theano.compile import pfunc
 from pylearn.shared.layers import LogisticRegression, Kouh2008

 def test_dtype():
--- a/pylearn/shared/layers/tests/test_sigmoidal_layer.py	Thu Jan 21 13:17:09 2010 -0500
+++ b/pylearn/shared/layers/tests/test_sigmoidal_layer.py	Wed Jan 27 09:56:37 2010 -0500
@@ -1,7 +1,7 @@
 import numpy
 from pylearn.shared.layers import SigmoidalLayer, LogisticRegression
 from theano import tensor
-from theano.compile.sandbox import shared, pfunc
+from theano.compile import shared, pfunc

 def test_w_random(dtype='float64'):
     if dtype == 'float64':
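For code that has to run against both an older Theano (where shared and pfunc still live in the sandbox) and a checkout that includes the move, a hypothetical compatibility shim, not part of this changeset, could fall back to the old path:

    # try the post-move location first, then the pre-move sandbox location
    try:
        from theano.compile import shared, pfunc
    except ImportError:
        from theano.compile.sandbox import shared, pfunc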