pylearn: changeset 69:8c2607f387e6
added softplus, elaborated sigmoid
author    James Bergstra <bergstrj@iro.umontreal.ca>
date      Mon, 21 Apr 2008 15:23:49 -0400
parents   315eb36ff954
children  76e5c0f37165
files     _nnet_ops.py nnet_ops.py
diffstat  2 files changed, 63 insertions(+), 9 deletions(-)
--- a/_nnet_ops.py	Fri Apr 18 03:49:17 2008 -0400
+++ b/_nnet_ops.py	Mon Apr 21 15:23:49 2008 -0400
@@ -11,6 +11,11 @@
     def test_elemwise(self):
         TT.verify_grad(self, Sigmoid, [numpy.random.rand(3,4)])

+class T_softplus(unittest.TestCase):
+    def setUp(self):
+        numpy.random.seed(9999)
+    def test_elemwise(self):
+        TT.verify_grad(self, Softplus, [numpy.random.rand(3,4)])

 class T_CrossentropySoftmax1Hot(unittest.TestCase):
     def setUp(self):
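
(Aside, not part of the changeset: TT.verify_grad numerically checks a symbolic gradient against finite differences. A rough standalone NumPy sketch of the same idea for softplus, whose analytic derivative is the sigmoid; the variable names below are illustrative only.)

import numpy

# Centered finite difference of softplus, compared against its analytic
# derivative: d/dx log(1 + exp(x)) = sigmoid(x).
numpy.random.seed(9999)
x = numpy.random.rand(3, 4)
eps = 1e-6

softplus = lambda v: numpy.log1p(numpy.exp(v))      # safe here: v stays in [0, 1)
numeric = (softplus(x + eps) - softplus(x - eps)) / (2.0 * eps)
analytic = 1.0 / (1.0 + numpy.exp(-x))              # sigmoid(x)

assert numpy.allclose(numeric, analytic, atol=1e-6)
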
--- a/nnet_ops.py	Fri Apr 18 03:49:17 2008 -0400
+++ b/nnet_ops.py	Mon Apr 21 15:23:49 2008 -0400
@@ -2,17 +2,67 @@
 from theano import tensor, gof, scalar
 import numpy

-class ScalarSigmoid(scalar.UnaryScalarOp):
+############
+#
+# SCALAR OPS
+#
+
+class ScalarSigmoid(scalar.FloatUnaryScalarOp):
+    @staticmethod
+    def st_impl(x):
+        if x < -30.0:
+            return 0.0
+        if x > 30.0:
+            return 1.0
+        return 1.0 / (1.0 + numpy.exp(-x))
     def impl(self, x):
-        return 1.0 / (1 + numpy.exp(-x))
+        return ScalarSigmoid.st_impl(x)
     def grad(self, (x,), (gz,)):
-        return gz * scalar_sigmoid(x) * (1.0 - scalar_sigmoid(x)),
-    def c_foreach(self, (x,), (z,)):
-        return "%(z)s = 1.0 / (1 + exp(-%(x)s));" % locals()
+        y = scalar_sigmoid(x)
+        return [gz * y * (1.0 - y)]
+    def c_foreach(self, (x,), (z,), sub):
+        if 'float' in self.inputs[0].dtype:
+            return """%(z)s =
+        %(x)s < -30.0
+        ? 0.0
+        : %(x)s > 30.0
+        ? 1.0
+        : 1.0 /(1.0+exp(-%(x)s));""" % locals()
+        raise NotImplementedError('only floatingpoint is implemented')
 scalar_sigmoid = gof.op.constructor(ScalarSigmoid)
-Sigmoid, sigmoid, SigmoidInplace, sigmoid_inplace \
-    = theano.tensor.broadcast(ScalarSigmoid, 'Sigmoid')
+Sigmoid, sigmoid, SigmoidInplace, sigmoid_inplace =\
+        tensor.broadcast(ScalarSigmoid, 'Sigmoid')

+class ScalarSoftplus(scalar.FloatUnaryScalarOp):
+    @staticmethod
+    def static_impl(x):
+        if x < -30.0:
+            return 0.0
+        if x > 30.0:
+            return x
+        return numpy.log1p(numpy.exp(x))
+    def impl(self, x):
+        return ScalarSoftplus.static_impl(x)
+    def grad(self, (x,), (gz,)):
+        return [gz * scalar_sigmoid(x)]
+    def c_foreach(self, (x,), (z,), sub):
+        if 'float' in self.inputs[0].dtype:
+            return """%(z)s =
+        %(x)s < -30.0
+        ? 0.0
+        : %(x)s > 30.0
+        ? %(x)s
+        : log1p(exp(%(x)s));""" % locals()
+        raise NotImplementedError('only floating point x is implemented')
+scalar_softplus = gof.op.constructor(ScalarSoftplus)
+Softplus, softplus, SoftplusInplace, softplus_inplace =\
+        tensor.broadcast(ScalarSoftplus, 'Softplus')
+
+
+############
+#
+# TENSOR OPS
+#

 class CrossentropySoftmax1Hot(gof.op.Op):

@@ -218,8 +268,6 @@
     }
     """ % dict(locals(), **sub)

-
-
 crossentropy_softmax_1hot = gof.op.constructor(CrossentropySoftmax1Hot)

 class CrossentropySoftmax1HotDx (gof.op.Op):

@@ -299,3 +347,4 @@
         dx_i[y_i * Sdx] -= dnll_i;
     }
     """ % dict(locals(), **sub)
+
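
(Aside, not part of the changeset: the clamping at ±30 and the use of log1p are what keep these ops finite and precise in floating point. A minimal standalone NumPy sketch mirroring the st_impl/static_impl rules above; it is an illustration, not the committed code.)

import numpy

def scalar_sigmoid(x):
    # mirrors ScalarSigmoid.st_impl: clamp so exp() never overflows
    if x < -30.0:
        return 0.0
    if x > 30.0:
        return 1.0
    return 1.0 / (1.0 + numpy.exp(-x))

def scalar_softplus(x):
    # mirrors ScalarSoftplus.static_impl: log1p keeps precision when exp(x) is tiny,
    # and softplus(x) ~= x for large x, so the clamped branch returns x directly
    if x < -30.0:
        return 0.0
    if x > 30.0:
        return x
    return numpy.log1p(numpy.exp(x))

# a naive log(1 + exp(x)) would overflow for large x; the clamped forms do not
print(scalar_softplus(1000.0))   # 1000.0
print(scalar_sigmoid(-1000.0))   # 0.0
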