annotate nnet_ops.py @ 25:b63e8c0bf21b

added __init__.py, fixed crossentropy_softmax_1hot function name
author bergstrj@iro.umontreal.ca
date Thu, 10 Apr 2008 20:53:44 -0400
parents 2e8be9f5412b
children bf0145fa73e8
import theano
from theano import tensor, gof, scalar
import numpy

class ScalarSigmoid(scalar.UnaryScalarOp):
    def impl(self, x):
        return 1.0 / (1 + numpy.exp(-x))
    def grad(self, (x,), (gz,)):
        # sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))
        return gz * scalar_sigmoid(x) * (1.0 - scalar_sigmoid(x)),
    def c_foreach(self, (x,), (z,)):
        return "%(z)s = 1.0 / (1 + exp(-%(x)s));" % locals()
scalar_sigmoid = gof.op.constructor(ScalarSigmoid)
Sigmoid, sigmoid, SigmoidInplace, sigmoid_inplace \
    = theano.tensor.broadcast(ScalarSigmoid, 'Sigmoid')

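# Illustrative sketch (not from the original file): a quick numpy check of the
# sigmoid gradient identity used in ScalarSigmoid.grad above. The helper name
# is hypothetical; only numpy is assumed.
def _check_scalar_sigmoid_grad(x=0.3, eps=1e-6):
    s = lambda v: 1.0 / (1 + numpy.exp(-v))
    analytic = s(x) * (1.0 - s(x))                   # the grad formula above
    numeric = (s(x + eps) - s(x - eps)) / (2 * eps)  # centered finite difference
    assert abs(analytic - numeric) < 1e-6
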
class CrossentropySoftmax1Hot(gof.op.Op):
    """A special compound Op for the output of neural-net classifiers.

    This Op has two outputs:
    - KL(softmax(x), y)
    - softmax(x)

    x[i] is assumed to be a dense vector.
    softmax(x[i]) is the i'th distribution over len(x[i]) options.
    y[i] is an integer index, encoding a 1-hot distribution.

    """
    nin = 2
    nout = 2
    def __init__(self, x, y_idx, **kwargs):
        x = tensor._as_tensor(x)
        y_idx = tensor._as_tensor(y_idx)
        # TODO: Is this correct? It used to be y, not y_idx
        nll = tensor.Tensor(x.dtype, y_idx.broadcastable)
        # nll = Tensor(x.dtype, y.broadcastable)
        sm = tensor.Tensor(x.dtype, x.broadcastable)
        self.inputs = [x, y_idx]
        self.outputs = [nll, sm]
    def perform(self):
        x, y_idx = [i.data for i in self.inputs]
        sm = numpy.zeros_like(x)  # softmax
        nll = numpy.zeros(x.shape[0])  # nll(y | softmax(x))
        for i in xrange(sm.shape[0]):
            sm[i] = numpy.exp(x[i] - numpy.max(x[i]))  # softmax, shifted for numerical stability
            sm[i] *= 1.0 / numpy.sum(sm[i])  # vector scale
            nll[i] = -numpy.log(sm[i, y_idx[i]])  # cross-entropy
        self.outputs[0].data = nll
        self.outputs[1].data = sm
    def grad(self, (x, y_idx), (g_nll, g_sm)):
        if g_sm is not None:
            raise NotImplementedError()
        nll, sm = crossentropy_softmax_1hot(x, y_idx)
        dx = CrossentropySoftmax1Hot.Dx(g_nll, sm, y_idx).outputs[0]
        return dx, None

    class Dx(gof.op.Op):
        """Gradient wrt x of the CrossentropySoftmax1Hot Op"""
        nin = 3
        nout = 1
        def __init__(self, dy, sm, y_idx, **kwargs):
            dy = tensor._as_tensor(dy)
            sm = tensor._as_tensor(sm)
            y_idx = tensor._as_tensor(y_idx)
            self.inputs = [dy, sm, y_idx]
            self.outputs = [tensor.Tensor(sm.dtype, sm.broadcastable)]
        def perform(self):
            dy, sm, y_idx = [i.data for i in self.inputs]
            # dx[i] = dy[i] * (sm[i] - onehot(y_idx[i])), the softmax/NLL gradient
            dx = numpy.zeros_like(sm)
            for i in xrange(sm.shape[0]):
                dx[i] = dy[i] * sm[i]  # vector scale
                dx[i, y_idx[i]] -= dy[i]  # scalar decrement
            self.outputs[0].data = dx
        def grad(self, *args):
            raise NotImplementedError()
crossentropy_softmax_1hot = gof.op.constructor(CrossentropySoftmax1Hot)
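
# Illustrative sketch (not from the original file): the same forward pass that
# CrossentropySoftmax1Hot.perform computes and the gradient that Dx.perform
# computes, written as stand-alone numpy so the two can be compared directly.
# The helper names are hypothetical; only numpy is assumed.
def _crossentropy_softmax_1hot_numpy(x, y_idx):
    sm = numpy.exp(x - x.max(axis=1)[:, None])   # row-wise stabilized exp
    sm /= sm.sum(axis=1)[:, None]                # normalize each row
    nll = -numpy.log(sm[numpy.arange(x.shape[0]), y_idx])
    return nll, sm

def _crossentropy_softmax_1hot_dx_numpy(dy, sm, y_idx):
    dx = dy[:, None] * sm                        # vector scale, as in Dx.perform
    dx[numpy.arange(sm.shape[0]), y_idx] -= dy   # scalar decrement
    return dx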

#TODO: write a version of CrossentropySoftmax1Hot that accepts a bias for x, if
#      this op needs to be faster.
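
# Illustrative sketch (not from the original file) of what the TODO above might
# compute: folding a bias into the forward pass so the softmax is taken over
# x[i] + b rather than x[i]. A hypothetical numpy-only draft, not the Op itself.
def _crossentropy_softmax_1hot_with_bias_numpy(x, b, y_idx):
    xb = x + b                                   # bias broadcast across rows
    sm = numpy.exp(xb - xb.max(axis=1)[:, None])
    sm /= sm.sum(axis=1)[:, None]
    nll = -numpy.log(sm[numpy.arange(x.shape[0]), y_idx])
    return nll, sm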