view _test_nnet_ops.py @ 382:b4efd192d880

Moved xent loss to nnet_ops
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 01:58:16 -0400
parents 2ee53bae9ee0
children 43d9aa93934e
line wrap: on
line source


import unittest
import theano._test_tensor as TT
import numpy

from nnet_ops import *

class T_sigmoid(unittest.TestCase):
    """Numeric gradient check for the elementwise sigmoid op."""

    def setUp(self):
        # Fixed seed so the random test matrix is reproducible run-to-run.
        numpy.random.seed(9999)

    def test_elemwise(self):
        inputs = [numpy.random.rand(3, 4)]
        TT.verify_grad(self, sigmoid, inputs)

class T_softplus(unittest.TestCase):
    """Numeric gradient check for the elementwise softplus op."""

    def setUp(self):
        # Fixed seed so the random test matrix is reproducible run-to-run.
        numpy.random.seed(9999)

    def test_elemwise(self):
        inputs = [numpy.random.rand(3, 4)]
        TT.verify_grad(self, softplus, inputs)

class T_CrossentropySoftmax1Hot(unittest.TestCase):
    """Gradient checks for the fused softmax + 1-hot cross-entropy ops."""

    def setUp(self):
        # Fixed seed keeps the random inputs deterministic.
        numpy.random.seed(9999)

    def test0(self):
        # Gradient check for the variant that takes an explicit bias vector.
        targets = [0, 1, 3]

        class LossOnly(object):
            # verify_grad drives differentiation through make_node; the
            # [0:1] slice exposes only the loss output, not the softmax.
            def make_node(self, x, b):
                return crossentropy_softmax_1hot_with_bias(x, b, targets)[0:1]

        TT.verify_grad(self, LossOnly(),
                       [numpy.random.rand(3, 4), numpy.random.rand(4)])

    def test1(self):
        # Gradient check for the bias-free variant.
        targets = [0, 1, 3]

        class LossOnly(object):
            # Same slicing trick: check the gradient of the loss output only.
            def make_node(self, x):
                return crossentropy_softmax_1hot(x, targets)[0:1]

        TT.verify_grad(self, LossOnly(), [numpy.random.rand(3, 4)])



# Allow running this test module directly: discovers and runs all
# unittest.TestCase classes defined above.
if __name__ == '__main__':
    unittest.main()