diff _test_nnet_ops.py @ 292:174374d59405

merge
author James Bergstra <bergstrj@iro.umontreal.ca>
date Fri, 06 Jun 2008 15:56:18 -0400
parents 2ee53bae9ee0
children 43d9aa93934e
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/_test_nnet_ops.py	Fri Jun 06 15:56:18 2008 -0400
@@ -0,0 +1,41 @@
+
+import unittest
+import theano._test_tensor as TT
+import numpy
+
+from nnet_ops import *
+
+class T_sigmoid(unittest.TestCase):
+    def setUp(self):
+        numpy.random.seed(9999)
+    def test_elemwise(self):
+        TT.verify_grad(self, sigmoid, [numpy.random.rand(3,4)])
+
+class T_softplus(unittest.TestCase):
+    def setUp(self):
+        numpy.random.seed(9999)
+    def test_elemwise(self):
+        TT.verify_grad(self, softplus, [numpy.random.rand(3,4)])
+
+class T_CrossentropySoftmax1Hot(unittest.TestCase):
+    def setUp(self):
+        numpy.random.seed(9999)
+    def test0(self):
+        y_idx = [0,1,3]
+        class Dummy(object):
+            def make_node(self, a,b):
+                return crossentropy_softmax_1hot_with_bias(a, b, y_idx)[0:1]
+        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4),
+            numpy.random.rand(4)])
+
+    def test1(self):
+        y_idx = [0,1,3]
+        class Dummy(object):
+            def make_node(self, a):
+                return crossentropy_softmax_1hot(a, y_idx)[0:1]
+        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4)])
+
+
+
+if __name__ == '__main__':
+    unittest.main()
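
The tests above lean on theano._test_tensor.verify_grad, which compares an op's symbolic gradient against a finite-difference estimate at a random point. The sketch below illustrates that idea in plain numpy for the two elementwise ops; it is not the Theano implementation, and the helper names (numeric_grad, check_elemwise_grad), the step size eps, and the tolerance tol are assumptions made for this example.

    import numpy

    def sigmoid(x):
        return 1.0 / (1.0 + numpy.exp(-x))

    def softplus(x):
        return numpy.log1p(numpy.exp(x))

    def numeric_grad(f, x, eps=1e-6):
        """Central-difference estimate of d(sum(f(x)))/dx, one entry at a time."""
        g = numpy.zeros_like(x)
        flat = x.ravel()       # view into x, so writes below perturb x in place
        gflat = g.ravel()
        for i in range(flat.size):
            orig = flat[i]
            flat[i] = orig + eps
            fp = f(x).sum()
            flat[i] = orig - eps
            fm = f(x).sum()
            flat[i] = orig     # restore the perturbed entry
            gflat[i] = (fp - fm) / (2 * eps)
        return g

    def check_elemwise_grad(f, df, x, tol=1e-4):
        """Compare the analytic elementwise derivative df(x) to the numeric estimate."""
        approx = numeric_grad(f, x)
        exact = df(x)
        assert numpy.allclose(approx, exact, atol=tol), (approx, exact)

    if __name__ == '__main__':
        numpy.random.seed(9999)
        x = numpy.random.rand(3, 4)
        # d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x))
        check_elemwise_grad(sigmoid, lambda v: sigmoid(v) * (1 - sigmoid(v)), x)
        # d/dx softplus(x) = sigmoid(x)
        check_elemwise_grad(softplus, lambda v: sigmoid(v), x)
        print('elementwise gradient checks passed')

The same recipe applies to the crossentropy_softmax_1hot_with_bias test: the per-row loss is -log softmax(x + b)[y_idx], and its analytic gradient with respect to x is softmax(x + b) minus the one-hot encoding of y_idx, which is what a numeric check of that op would be compared against.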