changeset 25:b63e8c0bf21b

added __init__.py, fixed crossentropy_softmax_1hot function name
author bergstrj@iro.umontreal.ca
date Thu, 10 Apr 2008 20:53:44 -0400
parents 2e8be9f5412b
children e6c550cb2896
files __init__.py _nnet_ops.py nnet_ops.py
diffstat 2 files changed, 3 insertions(+), 3 deletions(-)
--- a/_nnet_ops.py	Thu Apr 10 17:25:13 2008 -0400
+++ b/_nnet_ops.py	Thu Apr 10 20:53:44 2008 -0400
@@ -18,7 +18,7 @@
     def test0(self):
         y_idx = [0,1,3]
         def output1(a):
-            return cross_entropy_softmax_1hot(a, y_idx)[0:1]
+            return crossentropy_softmax_1hot(a, y_idx)[0:1]
         TT.verify_grad(self, output1, [numpy.random.rand(3,4)])
 
 
--- a/nnet_ops.py	Thu Apr 10 17:25:13 2008 -0400
+++ b/nnet_ops.py	Thu Apr 10 20:53:44 2008 -0400
@@ -51,7 +51,7 @@
     def grad(self, (x, y_idx), (g_nll, g_sm)):
         if g_sm is not None:
             raise NotImplementedError()
-        nll, sm = cross_entropy_softmax_1hot(x, y_idx)
+        nll, sm = crossentropy_softmax_1hot(x, y_idx)
         dx = CrossentropySoftmax1Hot.Dx(g_nll, sm, y_idx).outputs[0]
         return dx, None
 
@@ -74,7 +74,7 @@
             self.outputs[0].data = dx
         def grad(self, *args):
             raise NotImplementedError()
-cross_entropy_softmax_1hot = gof.op.constructor(CrossentropySoftmax1Hot)
+crossentropy_softmax_1hot = gof.op.constructor(CrossentropySoftmax1Hot)
 
 #TODO: write a version of CrossentropySoftmax1Hot that accepts a bias for x, if
 # this op needs to be faster.
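
A minimal usage sketch of the renamed constructor, mirroring the updated test in _nnet_ops.py above. It assumes nnet_ops and its dependencies (numpy and the gof/tensor framework) are importable; everything except crossentropy_softmax_1hot itself is illustrative.

    import numpy
    from nnet_ops import crossentropy_softmax_1hot

    x = numpy.random.rand(3, 4)   # one row of activations per example
    y_idx = [0, 1, 3]             # target class index for each row

    # The constructor returns (nll, sm): the per-example negative
    # log-likelihood and the softmax output, as used in
    # CrossentropySoftmax1Hot.grad in the diff above.
    nll, sm = crossentropy_softmax_1hot(x, y_idx)

As in the test's output1 helper, callers that only need the loss can slice the result, e.g. crossentropy_softmax_1hot(x, y_idx)[0:1].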