diff _nnet_ops.py @ 117:3ef569b92fba

ported nnet_ops to new theano
author James Bergstra <bergstrj@iro.umontreal.ca>
date Wed, 07 May 2008 15:28:17 -0400
parents 76e5c0f37165
children
line diff
--- a/_nnet_ops.py	Wed May 07 13:07:33 2008 -0400
+++ b/_nnet_ops.py	Wed May 07 15:28:17 2008 -0400
@@ -9,29 +9,31 @@
     def setUp(self):
         numpy.random.seed(9999)
     def test_elemwise(self):
-        TT.verify_grad(self, Sigmoid, [numpy.random.rand(3,4)])
+        TT.verify_grad(self, sigmoid, [numpy.random.rand(3,4)])
 
 class T_softplus(unittest.TestCase):
     def setUp(self):
         numpy.random.seed(9999)
     def test_elemwise(self):
-        TT.verify_grad(self, Softplus, [numpy.random.rand(3,4)])
+        TT.verify_grad(self, softplus, [numpy.random.rand(3,4)])
 
 class T_CrossentropySoftmax1Hot(unittest.TestCase):
     def setUp(self):
         numpy.random.seed(9999)
     def test0(self):
         y_idx = [0,1,3]
-        def output1(a,b):
-            return crossentropy_softmax_1hot_with_bias(a, b, y_idx)[0:1]
-        TT.verify_grad(self, output1, [numpy.random.rand(3,4),
+        class Dummy(object):
+            def make_node(self, a,b):
+                return crossentropy_softmax_1hot_with_bias(a, b, y_idx)[0:1]
+        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4),
             numpy.random.rand(4)])
 
     def test1(self):
         y_idx = [0,1,3]
-        def output1(a):
-            return crossentropy_softmax_1hot(a, y_idx)[0:1]
-        TT.verify_grad(self, output1, [numpy.random.rand(3,4)])
+        class Dummy(object):
+            def make_node(self, a):
+                return crossentropy_softmax_1hot(a, y_idx)[0:1]
+        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4)])
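
The substantive change in both hunks is the calling convention for TT.verify_grad: the plain closures (output1) are replaced by throwaway classes exposing make_node, and the capitalized op names Sigmoid/Softplus become the lowercase instances sigmoid/softplus. Below is a small self-contained sketch of that adapter pattern, written under the assumption (not shown in this changeset) that the new verify_grad duck-types on a make_node method; verify_grad_sketch and the other helper names are hypothetical illustrations, not Theano's real API.

# Minimal sketch of the adapter pattern used above -- NOT Theano's real
# verify_grad.  The assumption is that the new interface calls
# op_like.make_node(*inputs) instead of op_like(*inputs).
import numpy

def verify_grad_sketch(op_like, pt):
    # New-style convention: build the expression through make_node rather
    # than by calling the object directly, which is why a bare closure such
    # as the old output1() no longer fits.
    return op_like.make_node(*pt)

class Dummy(object):
    # Throwaway adapter: wraps a graph-building function so it exposes
    # make_node, mirroring the Dummy classes introduced in the tests above.
    def __init__(self, fn):
        self.fn = fn
    def make_node(self, *inputs):
        return self.fn(*inputs)

# Old convention would have passed the function itself; the new one wraps it:
result = verify_grad_sketch(Dummy(lambda a: a * 2.0), [numpy.random.rand(3, 4)])

Note that in the tests above y_idx is captured by the wrapping class rather than threaded through verify_grad, so the wrapped expression still only takes the differentiable inputs (a, or a and b) as arguments.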