Mercurial > pylearn
view _test_nnet_ops.py @ 422:32c5f87bc54e
Added __len__ to HStackedDataSet and changed the default len() from None to sys.maxint
author | Yoshua Bengio <bengioy@iro.umontreal.ca> |
---|---|
date | Sat, 19 Jul 2008 14:12:41 -0400 |
parents | 43d9aa93934e |
children | 9cfc2fc0f4d1 |
line wrap: on
line source
"""Unit tests for the ops defined in nnet_ops (sigmoid, softplus,
cross-entropy softmax, row-prepend ops) plus a small linear-solve sanity
check.

Gradient correctness is verified numerically via
theano._test_tensor.verify_grad.

NOTE(review): `tensor` is used below (tensor.matrix) but is not imported
explicitly here -- it presumably comes in through the star import from
nnet_ops; verify.
"""
import unittest

import numpy

import theano
import theano._test_tensor as TT
from nnet_ops import *


class T_sigmoid(unittest.TestCase):
    def setUp(self):
        # Fixed seed so the numeric gradient check is reproducible.
        numpy.random.seed(9999)

    def test_elemwise(self):
        TT.verify_grad(self, sigmoid, [numpy.random.rand(3, 4)])


class T_softplus(unittest.TestCase):
    def setUp(self):
        numpy.random.seed(9999)

    def test_elemwise(self):
        TT.verify_grad(self, softplus, [numpy.random.rand(3, 4)])


class T_CrossentropySoftmax1Hot(unittest.TestCase):
    def setUp(self):
        numpy.random.seed(9999)

    def test0(self):
        """Gradient check for the with-bias variant.

        verify_grad expects an op-like object; the Dummy wrapper closes
        over the fixed integer targets y_idx and exposes only the
        differentiable inputs (the [0:1] slice keeps just the nll output).
        """
        y_idx = [0, 1, 3]

        class Dummy(object):
            def make_node(self, a, b):
                return crossentropy_softmax_1hot_with_bias(a, b, y_idx)[0:1]

        TT.verify_grad(self, Dummy(),
                       [numpy.random.rand(3, 4), numpy.random.rand(4)])

    def test1(self):
        """Gradient check for the no-bias variant."""
        y_idx = [0, 1, 3]

        class Dummy(object):
            def make_node(self, a):
                return crossentropy_softmax_1hot(a, y_idx)[0:1]

        TT.verify_grad(self, Dummy(), [numpy.random.rand(3, 4)])


# BUG FIX: this class was originally also named T_prepend, so the second
# T_prepend definition below silently replaced it and these tests were
# never collected or run.  Renamed so both test classes execute; the name
# T_prepend stays bound to the same class it referred to at runtime.
class T_prepend_constant(unittest.TestCase):
    def test0(self):
        """Prepend_scalar_constant_to_each_row: adds a constant column."""
        x = tensor.matrix('x')
        y = Prepend_scalar_constant_to_each_row(4.)(x)
        f = theano.function([x], [y])
        m = numpy.random.rand(3, 5)
        my = f(m)
        self.assertTrue(my.shape == (3, 6), my.shape)
        self.assertTrue(numpy.all(my[:, 0] == 4.0))


class T_prepend(unittest.TestCase):
    def test0(self):
        """Prepend_scalar_to_each_row: scalar is a runtime input.

        Also checks the float32 input is upcast to float64 in the result.
        """
        x = tensor.matrix('x')
        y = Prepend_scalar_to_each_row()(5., x)
        f = theano.function([x], [y])
        m = numpy.ones((3, 5), dtype="float32")
        my = f(m)
        self.assertTrue(str(my.dtype) == 'float64')
        self.assertTrue(my.shape == (3, 6))
        self.assertTrue(numpy.all(my[:, 0] == 5.0))


class T_solve(unittest.TestCase):
    def setUp(self):
        self.rng = numpy.random.RandomState(666)

    def test0(self):
        """Sanity check: numpy.linalg.solve recovers x with A @ x ~= b."""
        A = self.rng.randn(5, 5)
        b = numpy.array(range(5), dtype=float)
        x = numpy.linalg.solve(A, b)
        Ax = numpy.dot(A, x)
        are = theano.gradient.numeric_grad.abs_rel_err(Ax, b)
        self.assertTrue(numpy.all(are < 1.0e-5), (are, Ax, b))


if __name__ == '__main__':
    unittest.main()