pylearn: diff _test_nnet_ops.py @ 286:2ee53bae9ee0
renamed _nnet_ops.py to _test_nnet_ops.py to be used with autotest
author | Frederic Bastien <bastienf@iro.umontreal.ca> |
---|---|
date | Fri, 06 Jun 2008 13:55:59 -0400 |
parents | _nnet_ops.py@3ef569b92fba |
children | 43d9aa93934e |
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/_test_nnet_ops.py	Fri Jun 06 13:55:59 2008 -0400
@@ -0,0 +1,41 @@
+
+import unittest
+import theano._test_tensor as TT
+import numpy
+
+from nnet_ops import *
+
+class T_sigmoid(unittest.TestCase):
+    def setUp(self):
+        numpy.random.seed(9999)
+    def test_elemwise(self):
+        TT.verify_grad(self, sigmoid, [numpy.random.rand(3,4)])
+
+class T_softplus(unittest.TestCase):
+    def setUp(self):
+        numpy.random.seed(9999)
+    def test_elemwise(self):
+        TT.verify_grad(self, softplus, [numpy.random.rand(3,4)])
+
+class T_CrossentropySoftmax1Hot(unittest.TestCase):
+    def setUp(self):
+        numpy.random.seed(9999)
+    def test0(self):
+        y_idx = [0,1,3]
+        class Dummy(object):
+            def make_node(self, a,b):
+                return crossentropy_softmax_1hot_with_bias(a, b, y_idx)[0:1]
+        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4),
+                                       numpy.random.rand(4)])
+
+    def test1(self):
+        y_idx = [0,1,3]
+        class Dummy(object):
+            def make_node(self, a):
+                return crossentropy_softmax_1hot(a, y_idx)[0:1]
+        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4)])
+
+
+
+if __name__ == '__main__':
+    unittest.main()
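Note on running these tests: the commit message says the file was renamed so it can be picked up by autotest. As a minimal stand-alone sketch (this is not the pylearn autotest harness itself), the module can also be loaded and run with the standard unittest machinery, assuming the pylearn checkout is on PYTHONPATH so that _test_nnet_ops, nnet_ops and theano._test_tensor are importable:

    import unittest

    # Hypothetical stand-alone runner: load the test cases from the new
    # module by name and run them with the default text runner.
    suite = unittest.TestLoader().loadTestsFromName('_test_nnet_ops')
    unittest.TextTestRunner(verbosity=2).run(suite)

Equivalently, running the file directly calls unittest.main() through the __main__ guard at the bottom of the diff.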