diff nnet_ops.py @ 25:b63e8c0bf21b

added __init__.py, fixed crossentropy_softmax_1hot function name
author bergstrj@iro.umontreal.ca
date Thu, 10 Apr 2008 20:53:44 -0400
parents 2e8be9f5412b
children bf0145fa73e8
--- a/nnet_ops.py	Thu Apr 10 17:25:13 2008 -0400
+++ b/nnet_ops.py	Thu Apr 10 20:53:44 2008 -0400
@@ -51,7 +51,7 @@
     def grad(self, (x, y_idx), (g_nll, g_sm)):
         if g_sm is not None:
             raise NotImplementedError()
-        nll, sm = cross_entropy_softmax_1hot(x, y_idx)
+        nll, sm = crossentropy_softmax_1hot(x, y_idx)
         dx = CrossentropySoftmax1Hot.Dx(g_nll, sm, y_idx).outputs[0]
         return dx, None
 
@@ -74,7 +74,7 @@
             self.outputs[0].data = dx
         def grad(self, *args):
             raise NotImplementedError()
-cross_entropy_softmax_1hot = gof.op.constructor(CrossentropySoftmax1Hot)
+crossentropy_softmax_1hot = gof.op.constructor(CrossentropySoftmax1Hot)
 
 #TODO: write a version of CrossentropySoftmax1Hot that accepts a bias for x, if
 # this op needs to be faster.
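
For context on what the renamed constructor computes: the fused op takes pre-softmax activations x and integer labels y_idx, and returns both the per-example negative log-likelihood and the softmax. Its gradient has the well-known closed form dx = g_nll * (softmax(x) - onehot(y_idx)), which is what CrossentropySoftmax1Hot.Dx produces and why grad above only needs the recomputed sm. Below is a minimal plain-NumPy sketch of that math, not the gof-based op itself; the function names here are illustrative:

    import numpy as np

    def crossentropy_softmax_1hot_np(x, y_idx):
        """Fused softmax + 1-hot cross-entropy (NumPy sketch).

        x:     (batch, n_classes) pre-softmax activations
        y_idx: (batch,) integer class labels
        Returns (nll, sm): per-example negative log-likelihood
        and the softmax output.
        """
        # Subtract the row max before exponentiating for numerical stability.
        z = x - x.max(axis=1, keepdims=True)
        e = np.exp(z)
        sm = e / e.sum(axis=1, keepdims=True)
        rows = np.arange(x.shape[0])
        nll = -np.log(sm[rows, y_idx])
        return nll, sm

    def crossentropy_softmax_1hot_dx(g_nll, sm, y_idx):
        """Gradient w.r.t. x: dx = g_nll[:, None] * (sm - onehot(y_idx))."""
        dx = sm * g_nll[:, None]
        # Subtract g_nll at the target class, i.e. the one-hot term.
        dx[np.arange(sm.shape[0]), y_idx] -= g_nll
        return dx

The bias variant the TODO mentions would presumably just compute softmax(x + b) in the forward pass, fusing the add into the same loop; the gradient formula is unchanged apart from db = dx.sum(axis=0).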