# _test_nnet_ops.py @ changeset 531:90a76a8238e8 ("Added function length()")
# author: Joseph Turian <turian@iro.umontreal.ca>
# date:   Tue, 18 Nov 2008 00:32:39 -0500
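
# Tests for the ops defined in nnet_ops: sigmoid, softplus, softmax,
# softmax_with_bias, crossentropy_softmax_1hot(_with_bias), and the
# row-prepend ops.  Most classes rely on theano's verify_grad, which
# compares a symbolic gradient against a finite-difference estimate on
# random inputs.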


import unittest
import theano
from theano import tensor
import theano._test_tensor as TT
import numpy

from nnet_ops import *

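# Gradient checks for the elementwise sigmoid and softplus ops.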
class T_sigmoid(unittest.TestCase):
    def setUp(self):
        numpy.random.seed(9999)
    def test_elemwise(self):
        TT.verify_grad(self, sigmoid, [numpy.random.rand(3,4)])

class T_softplus(unittest.TestCase):
    def setUp(self):
        numpy.random.seed(9999)
    def test_elemwise(self):
        TT.verify_grad(self, softplus, [numpy.random.rand(3,4)])

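# Gradient checks for softmax, one output column at a time: each Dummy
# wrapper exposes the make_node interface verify_grad expects and selects a
# single column of the softmax output.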
class T_Softmax(unittest.TestCase):
    def setUp(self):
        numpy.random.seed(9999)
    def test0(self):
        class Dummy(object):
            def make_node(self, a):
                return [softmax(a)[:,0]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4)])
    def test1(self):
        class Dummy(object):
            def make_node(self, a):
                return [softmax(a)[:,1]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4)])
    def test2(self):
        class Dummy(object):
            def make_node(self, a):
                return [softmax(a)[:,2]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4)])
    def test3(self):
        class Dummy(object):
            def make_node(self, a):
                return [softmax(a)[:,3]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4)])


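# Same column-by-column gradient checks, but for softmax_with_bias(a, b).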
class T_SoftmaxWithBias(unittest.TestCase):
    def setUp(self):
        numpy.random.seed(9999)
    def test0(self):
        class Dummy(object):
            def make_node(self, a, b):
                return [softmax_with_bias(a, b)[:,0]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4),
            numpy.random.rand(4)])
    def test1(self):
        class Dummy(object):
            def make_node(self, a, b):
                return [softmax_with_bias(a, b)[:,1]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4),
            numpy.random.rand(4)])
    def test2(self):
        class Dummy(object):
            def make_node(self, a, b):
                return [softmax_with_bias(a, b)[:,2]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4),
            numpy.random.rand(4)])
    def test3(self):
        class Dummy(object):
            def make_node(self, a, b):
                return [softmax_with_bias(a, b)[:,3]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4),
            numpy.random.rand(4)])

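# crossentropy_softmax_1hot(_with_bias) returns more than one output; the
# [0:1] slice keeps only the first (the per-example loss) so verify_grad
# checks the gradient of the cross-entropy term for the fixed targets y_idx.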
class T_CrossentropySoftmax1Hot(unittest.TestCase):
    def setUp(self):
        numpy.random.seed(9999)
    def test0(self):
        y_idx = [0,1,3]
        class Dummy(object):
            def make_node(self, a,b):
                return crossentropy_softmax_1hot_with_bias(a, b, y_idx)[0:1]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4),
            numpy.random.rand(4)])

    def test1(self):
        y_idx = [0,1,3]
        class Dummy(object):
            def make_node(self, a):
                return crossentropy_softmax_1hot(a, y_idx)[0:1]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4)])

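# Functional tests for the row-prepend ops: each should add a new first
# column filled with the given scalar, widening a (3, 5) matrix to (3, 6).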
class T_prepend_constant(unittest.TestCase):
    def test0(self):
        """basic functionality"""
        x = tensor.matrix('x')
        y = Prepend_scalar_constant_to_each_row(4.)(x)
        f = theano.function([x], y)
        m = numpy.random.rand(3, 5)
        my = f(m)
        self.failUnless(my.shape == (3, 6), my.shape)
        self.failUnless(numpy.all(my[:, 0] == 4.0))


class T_prepend_scalar(unittest.TestCase):
    def test0(self):
        """basic functionality"""
        x = tensor.matrix('x')
        y = Prepend_scalar_to_each_row()(5., x)
        f = theano.function([x], y)
        m = numpy.ones((3, 5), dtype="float32")
        my = f(m)
        # prepending a float64 python scalar upcasts the float32 input
        self.failUnless(str(my.dtype) == 'float64')
        self.failUnless(my.shape == (3, 6))
        self.failUnless(numpy.all(my[:, 0] == 5.0))

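# Sanity check of numpy.linalg.solve using theano's abs_rel_err helper; no
# nnet op is exercised here.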
class T_solve(unittest.TestCase):
    def setUp(self):
        self.rng = numpy.random.RandomState(666)

    def test0(self):
        A = self.rng.randn(5, 5)
        b = numpy.array(range(5), dtype=float)
        x = numpy.linalg.solve(A, b)
        Ax = numpy.dot(A, x)
        are = theano.gradient.numeric_grad.abs_rel_err(Ax, b)
        self.failUnless(numpy.all(are < 1.0e-5), (are, Ax, b))


if __name__ == '__main__':
    unittest.main()