annotate nnet_ops.py @ 262:14b9779622f9

Split LearningAlgorithm into OfflineLearningAlgorithm and OnlineLearningAlgorithm
author Yoshua Bengio <bengioy@iro.umontreal.ca>
date Tue, 03 Jun 2008 21:34:24 -0400
parents f6a7eb1b7970
children e4473d9697d7
rev   line source
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
1 import theano
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
2 from theano import tensor, scalar
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
3 import numpy
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
4
69
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
5 ############
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
6 #
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
7 # SCALAR OPS
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
8 #
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
9
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
class ScalarSigmoid(scalar.UnaryScalarOp):
    """Elementwise scalar logistic sigmoid: 1 / (1 + exp(-x)).

    Saturates to exactly 0.0 for x < -30 and exactly 1.0 for x > 30: at
    |x| = 30 the true sigmoid differs from 0/1 by less than 1e-13, so the
    clipping is below double precision while keeping exp() from overflowing.
    """
    @staticmethod
    def st_impl(x):
        # Saturation guards: avoid computing exp(-x) for large |x|.
        if x < -30.0:
            return 0.0
        if x > 30.0:
            return 1.0
        return 1.0 / (1.0 + numpy.exp(-x))
    def impl(self, x):
        return ScalarSigmoid.st_impl(x)
    def grad(self, inputs, output_gradients):
        # NOTE: tuple parameters (`def grad(self, (x,), (gz,))`) were removed
        # by PEP 3113; unpacking inside the body is valid on Python 2 and 3
        # and remains call-compatible for positional callers.
        (x,) = inputs
        (gz,) = output_gradients
        y = scalar_sigmoid(x)
        # d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x))
        return [gz * y * (1.0 - y)]
    def c_code(self, node, name, inputs, outputs, sub):
        # C implementation mirrors st_impl, including the +/-30 saturation.
        (x,) = inputs
        (z,) = outputs
        if node.inputs[0].type in [scalar.float32, scalar.float64]:
            return """%(z)s =
                %(x)s < -30.0
                ? 0.0
                : %(x)s > 30.0
                ? 1.0
                : 1.0 /(1.0+exp(-%(x)s));""" % locals()
        raise NotImplementedError('only floatingpoint is implemented')
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
# Instantiate the scalar op and wrap it as an elementwise tensor op.
# NOTE(review): `upgrade_to_float` presumably promotes integer inputs to a
# float output dtype -- confirm against theano.scalar's output-type policies.
scalar_sigmoid = ScalarSigmoid(scalar.upgrade_to_float, name='scalar_sigmoid')
sigmoid = tensor.Elemwise(scalar_sigmoid, name='sigmoid')
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
34
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
class ScalarSoftplus(scalar.UnaryScalarOp):
    """Elementwise scalar softplus: log(1 + exp(x)).

    Saturates to 0.0 for x < -30 and to x itself for x > 30, where the
    difference from the exact value is below double precision; this also
    keeps exp(x) from overflowing. Uses log1p for accuracy near exp(x) ~ 0.
    """
    @staticmethod
    def static_impl(x):
        # Saturation guards: avoid computing exp(x) for large |x|.
        if x < -30.0:
            return 0.0
        if x > 30.0:
            return x
        return numpy.log1p(numpy.exp(x))
    def impl(self, x):
        return ScalarSoftplus.static_impl(x)
    def grad(self, inputs, output_gradients):
        # NOTE: tuple parameters (`def grad(self, (x,), (gz,))`) were removed
        # by PEP 3113; unpacking inside the body is valid on Python 2 and 3
        # and remains call-compatible for positional callers.
        (x,) = inputs
        (gz,) = output_gradients
        # d/dx softplus(x) = sigmoid(x)
        return [gz * scalar_sigmoid(x)]
    def c_code(self, node, name, inputs, outputs, sub):
        # C implementation mirrors static_impl, including saturation.
        (x,) = inputs
        (z,) = outputs
        if node.inputs[0].type in [scalar.float32, scalar.float64]:
            return """%(z)s =
                %(x)s < -30.0
                ? 0.0
                : %(x)s > 30.0
                ? %(x)s
                : log1p(exp(%(x)s));""" % locals()
        raise NotImplementedError('only floating point x is implemented')
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
# Instantiate the scalar op and wrap it as an elementwise tensor op.
# NOTE(review): `upgrade_to_float` presumably promotes integer inputs to a
# float output dtype -- confirm against theano.scalar's output-type policies.
scalar_softplus = ScalarSoftplus(scalar.upgrade_to_float, name='scalar_softplus')
softplus = tensor.Elemwise(scalar_softplus, name='softplus')
69
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
58
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
59
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
60 ############
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
61 #
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
62 # TENSOR OPS
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
63 #
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
64
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
65 class CrossentropySoftmax1HotWithBias(theano.Op):
70
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
66 """A special compound L{Op} for the output of neural-net classifiers.
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
67
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
68 @type x: is a matrix of floats (32 or 64)
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
69 @type b: is a [row] vector of floats (32 or 64), length is number of cols in x
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
70 @type y_idx: a [column] vector of int (32 or 64), length is number of rows in x
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
71
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
72 @precondition: every entry in y_idx is a valid (non-negative) column index into x
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
73
70
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
74 This L{Op} has two outputs:
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
75 - KL(softmax(x+b), y)
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
76 - softmax(x+b)
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
77
70
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
78
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
79 softmax(x[i]) is the i'th distribution over len(x[i]) options
70
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
80
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
81 y_idx[i] is an integer index, encoding a 1-hot distribution.
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
82
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
83 In practice, when we're trying to do classification, we have one row in x
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
84 and y_idx per example, and y[i] is the index of the (correct) class of the
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
85 i'th example.
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
86
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
87 """
70
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
88 nin=3
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
89 nout=2
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
90 def __init__(self, **kwargs):
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
91 theano.Op.__init__(self, **kwargs)
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
92
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
93 def make_node(self, x, b, y_idx):
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
94 x = tensor.as_tensor(x)
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
95 b = tensor.as_tensor(b)
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
96 y_idx = tensor.as_tensor(y_idx)
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
97 if x.type.ndim != 2 \
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
98 or x.type.dtype not in ['float32', 'float64']:
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
99 raise ValueError('x must be 2-d tensor of floats')
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
100 if b.type.ndim != 1 \
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
101 or x.type.dtype not in ['float32', 'float64']:
121
2ca8dccba270 debugging mlp.py
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 117
diff changeset
102 raise ValueError('b must be 1-d tensor of floats')
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
103 if y_idx.type.ndim != 1 \
185
3d953844abd3 support for more int types in crossentropysoftmax1hot
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 184
diff changeset
104 or y_idx.type.dtype not in ['int8', 'int16', 'int32', 'int64']:
121
2ca8dccba270 debugging mlp.py
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 117
diff changeset
105 raise ValueError('y_idx must be 1-d tensor of ints')
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
106
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
107 # TODO: Is this correct? It used to be y, not y_idx
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
108 nll = tensor.Tensor(x.type.dtype,
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
109 y_idx.type.broadcastable).make_result()
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
110 # nll = Tensor(x.dtype, y.broadcastable)
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
111 sm = x.type.make_result()
185
3d953844abd3 support for more int types in crossentropysoftmax1hot
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 184
diff changeset
112 return theano.Apply(self, [x, b, y_idx], [nll, sm])
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
113 def perform(self, node, input_storage, output_storage):
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
114 x, b, y_idx = input_storage
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
115 if b.shape[0] != x.shape[1]:
70
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
116 raise ValueError('b must have same number of columns as x')
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
117 if y_idx.shape[0] != x.shape[0]:
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
118 raise ValueError('y_idx must have same number of rows as x')
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
119
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
120 sm = numpy.zeros_like(x) # softmax
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
121 nll = numpy.zeros(x.shape[0]) #nll(y | softmax(x))
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
122 for i in xrange(sm.shape[0]):
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
123 row = x[i] + b
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
124 sm[i] = numpy.exp(row - numpy.max(row)) #softmax
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
125 sm[i] *= 1.0 / numpy.sum(sm[i]) #vector scale
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
126 nll[i] = -numpy.log( sm[i, y_idx[i]]) #cross-entropy
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
127 output_storage[0][0] = nll
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
128 output_storage[1][0] = sm
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
129 def grad(self, (x, b, y_idx), (g_nll, g_sm)):
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
130 if g_sm is not None:
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
131 raise NotImplementedError()
70
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
132 nll, sm = crossentropy_softmax_1hot_with_bias(x, b, y_idx)
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
133 dx = CrossentropySoftmax1HotWithBiasDx()(g_nll, sm, y_idx)
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
134 db = tensor.sum(dx, axis = [0])
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
135 return dx, db, None
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
136
67
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
137 def c_headers(self): return ['<iostream>']
181
1b06bc2c3ca9 fixed c_code for the ops in nnet_ops.py
Olivier Breuleux <breuleuo@iro.umontreal.ca>
parents: 121
diff changeset
138 def c_code(self, node, name, (x, b, y_idx), (nll, sm), sub):
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
139 # this implementation was lifted from
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
140 # /u/bergstrj/cvs/bergstrj/src/feb07/nn.cxx
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
141
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
142 #TODO: put this into a templated function, in the support code
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
143 #TODO: declare the max of each row as an Op output
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
144
32
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
145 #TODO: set error messages for failures in this code
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
146
184
9a2aecc57a79 added TODO to nnet_ops
Olivier Breuleux <breuleuo@iro.umontreal.ca>
parents: 181
diff changeset
147 #TODO: use this to accept float32 and int32: node.inputs[0].type.dtype_specs()[1]
185
3d953844abd3 support for more int types in crossentropysoftmax1hot
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 184
diff changeset
148 y_idx_type = node.inputs[2].type.dtype_specs()[1]
184
9a2aecc57a79 added TODO to nnet_ops
Olivier Breuleux <breuleuo@iro.umontreal.ca>
parents: 181
diff changeset
149
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
150 return """
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
151 npy_intp* Nx = %(x)s->dimensions;
34
1b152f46ad0c consolidated code
bergstrj@iro.umontreal.ca
parents: 32
diff changeset
152
67
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
153 if (%(x)s->nd != 2)
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
154 {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
155 PyErr_SetString(PyExc_ValueError, "a not 2d tensor");
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
156 %(fail)s;
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
157 }
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
158 if (%(b)s->nd != 1)
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
159 {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
160 PyErr_SetString(PyExc_ValueError, "b not 1d tensor");
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
161 %(fail)s;
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
162 }
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
163 if (%(y_idx)s->nd != 1)
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
164 {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
165 PyErr_SetString(PyExc_ValueError, "y_idx not 1d tensor");
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
166 %(fail)s;
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
167 }
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
168 if (%(x)s->descr->type_num != PyArray_DOUBLE)
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
169 {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
170 PyErr_SetString(PyExc_TypeError, "a not float64");
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
171 %(fail)s;
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
172 }
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
173 if (%(b)s->descr->type_num != PyArray_DOUBLE)
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
174 {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
175 PyErr_SetString(PyExc_TypeError, "b not float64");
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
176 %(fail)s;
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
177 }
185
3d953844abd3 support for more int types in crossentropysoftmax1hot
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 184
diff changeset
178 if ((%(y_idx)s->descr->type_num != PyArray_INT64)
3d953844abd3 support for more int types in crossentropysoftmax1hot
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 184
diff changeset
179 && (%(y_idx)s->descr->type_num != PyArray_INT32)
3d953844abd3 support for more int types in crossentropysoftmax1hot
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 184
diff changeset
180 && (%(y_idx)s->descr->type_num != PyArray_INT16)
3d953844abd3 support for more int types in crossentropysoftmax1hot
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 184
diff changeset
181 && (%(y_idx)s->descr->type_num != PyArray_INT8))
67
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
182 {
185
3d953844abd3 support for more int types in crossentropysoftmax1hot
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 184
diff changeset
183 PyErr_SetString(PyExc_TypeError, "y_idx not int8, int16, int32, or int64");
67
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
184 %(fail)s;
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
185 }
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
186 if ((%(x)s->dimensions[1] != %(b)s->dimensions[0])
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
187 || (%(x)s->dimensions[0] != %(y_idx)s->dimensions[0]))
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
188 {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
189 PyErr_SetString(PyExc_ValueError, "dimension mismatch in arguments");
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
190 %(fail)s;
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
191 }
34
1b152f46ad0c consolidated code
bergstrj@iro.umontreal.ca
parents: 32
diff changeset
192
67
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
193 if ((NULL == %(nll)s) //initial condition
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
194 || (%(nll)s->dimensions[0] != %(y_idx)s->dimensions[0]))
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
195 {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
196 if (NULL != %(nll)s) Py_XDECREF(%(nll)s);
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
197 %(nll)s = (PyArrayObject*)PyArray_SimpleNew(1, PyArray_DIMS(%(y_idx)s), type_num_%(x)s);
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
198 if(!%(nll)s)
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
199 {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
200 PyErr_SetString(PyExc_MemoryError, "failed to alloc nll output");
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
201 %(fail)s;
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
202 }
34
1b152f46ad0c consolidated code
bergstrj@iro.umontreal.ca
parents: 32
diff changeset
203 }
67
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
204 if ((NULL == %(sm)s)
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
205 || (%(sm)s->dimensions[0] != %(x)s->dimensions[0])
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
206 || (%(sm)s->dimensions[1] != %(x)s->dimensions[1]))
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
207 {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
208 if (NULL != %(sm)s) Py_XDECREF(%(sm)s);
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
209 %(sm)s = (PyArrayObject*)PyArray_SimpleNew(2, PyArray_DIMS(%(x)s), type_num_%(x)s);
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
210 if(!%(sm)s) {
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
211 // The normal cleanup code will take care of %(nll)s
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
212 // Py_XDECREF(%(nll)s); %(nll)s=NULL;
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
213 PyErr_SetString(PyExc_MemoryError, "failed to alloc sm output");
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
214 %(fail)s
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
215 }
810a8e3c85e1 fixed horrible memory leak from crossentropy...
bergstra@is23.m
parents: 34
diff changeset
216 }
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
217
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
218 for (size_t i = 0; i < Nx[0]; ++i)
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
219 {
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
220 size_t j;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
221 double sum = 0.0;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
222 bool discount_max = false;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
223
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
224 const double* __restrict__ x_i = (double*)(%(x)s->data + %(x)s->strides[0] * i);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
225 const double* __restrict__ b_i = (double*)(%(b)s->data);
185
3d953844abd3 support for more int types in crossentropysoftmax1hot
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 184
diff changeset
226 const %(y_idx_type)s y_i = ((%(y_idx_type)s*)(%(y_idx)s->data + %(y_idx)s->strides[0] * i))[0];
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
227 double* __restrict__ sm_i = (double*)(%(sm)s->data + %(sm)s->strides[0] * i);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
228 double* __restrict__ nll_i = (double*)(%(nll)s->data + %(nll)s->strides[0] * i);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
229
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
230 npy_intp Sx = %(x)s->strides[1]/sizeof(double);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
231 npy_intp Sb = %(b)s->strides[0]/sizeof(double);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
232 npy_intp Ssm = %(sm)s->strides[1]/sizeof(double);
24
2e8be9f5412b added nnet_ops
bergstrj@iro.umontreal.ca
parents:
diff changeset
233
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
234 size_t row_max_j=0;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
235 double row_max = x_i[0] + b_i[0];
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
236 //try to compute sum and sm the easy way
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
237 for (j = 0; j < Nx[1]; ++j)
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
238 {
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
239 double row_ij = x_i[j * Sx] + b_i[j * Sb];
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
240 row_max_j = (row_ij > row_max) ? j : row_max_j;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
241 row_max = (row_ij > row_max) ? row_ij : row_max;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
242
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
243 double sm_ij = exp(row_ij);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
244 sum += sm_ij;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
245 sm_i[j * Ssm] = sm_ij;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
246 }
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
247 if ((0.0 == sum) || (isinf(sum)))
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
248 {
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
249 //our cheap trick didn't work... try again and do it better.
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
250 discount_max = true;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
251 sum = 0.0; //reset sum and recompute....
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
252 for (j = 0; j < Nx[1]; ++j)
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
253 {
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
254 double row_ij = x_i[j * Sx] + b_i[j * Sb];
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
255
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
256 double sm_ij = exp(row_ij - row_max);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
257 sum += sm_ij;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
258 sm_i[j * Ssm] = sm_ij;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
259 }
32
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
260 if ( (0.0 == sum) || (isinf(sum)))
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
261 {
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
262 //that was our best...
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
263 %(fail)s;
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
264 }
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
265 //if we still can't sum it up, we're screwed.
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
266 //So far, this assertion has never failed...
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
267 }
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
268
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
269 //cblas_dscal(x.N, 1.0 / sum, &mat_at(s,i,0), s.n);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
270 double sum_inv = 1.0 / sum;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
271 for (j = 0; j < Nx[1]; ++j)
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
272 {
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
273 sm_i[j * Ssm] *= sum_inv;
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
274 }
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
275
32
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
276 if (y_i >= Nx[1])
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
277 {
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
278 %(fail)s;
039c0f249859 added C impl for softmax dx
bergstrj@iro.umontreal.ca
parents: 30
diff changeset
279 }
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
280
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
281 nll_i[0] = - x_i[y_i*Sx]
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
282 - b_i[y_i*Sb]
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
283 + (discount_max ? row_max : 0.0)
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
284 + log(sum);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
285 //mat_at(y,i,0) = -log( mat_at(s,i,t[i])); //less accurate?
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
286 //mat_at(y,i,0) = - mat_at(x,i,t[i]) - mat_at(b,0,t[i]) + (discount_max ? maxi : 0.0) + log(sum);
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
287 }
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
288 """ % dict(locals(), **sub)
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
# Shared, ready-to-call instance of the CrossentropySoftmax1HotWithBias Op
# (class defined above; presumably returns (nll, softmax) — confirm there).
crossentropy_softmax_1hot_with_bias = CrossentropySoftmax1HotWithBias()
30
bf0145fa73e8 added c implementation for CrossentropySoftmax1Hot
bergstrj@iro.umontreal.ca
parents: 25
diff changeset
290
117
3ef569b92fba ported nnet_ops to new theano
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 70
diff changeset
class CrossentropySoftmax1HotWithBiasDx (theano.Op):
    """Gradient wrt x of the CrossentropySoftmax1Hot Op.

    Inputs are dy (gradient of the cost wrt each row's nll, rank 1),
    sm (the softmax output, rank 2) and y_idx (integer target index per
    row, rank 1).  The single output dx has the same shape as sm and is
    computed row-wise as

        dx[i] = dy[i] * sm[i]
        dx[i, y_idx[i]] -= dy[i]
    """
    # number of inputs (dy, sm, y_idx) and outputs (dx)
    nin=3
    nout=1
    def __init__(self, **kwargs):
        theano.Op.__init__(self,**kwargs)
    def make_node(self, dy, sm, y_idx,**kwargs):
        # Wrap all inputs as tensor results; the output result has the
        # same type as sm (same rank/dtype as the softmax matrix).
        dy = tensor.as_tensor(dy)
        sm = tensor.as_tensor(sm)
        y_idx = tensor.as_tensor(y_idx)
        return theano.Apply(self, [dy, sm, y_idx],[sm.type.make_result()])
    def perform(self, node, input_storage, output_storage):
        # Pure-numpy implementation; the c_code below mirrors this loop.
        dy,sm,y_idx = input_storage
        dx = numpy.zeros_like(sm)
        for i in xrange(sm.shape[0]):
            dx[i] = dy[i] * sm[i] #vector scale
            dx[i, y_idx[i]] -= dy[i] #scalar decrement
        output_storage[0][0] = dx
    def grad(self, *args):
        # Second-order gradient is not provided.
        raise NotImplementedError()
    def c_code(self, node, name, (dnll, sm, y_idx), (dx,), sub):
        # C implementation of perform().  The %(...)s placeholders are
        # substituted from locals() and `sub` by the final interpolation;
        # the string body is generated C source and is left unmodified.
        # dnll/sm must be float64; y_idx may be int8/16/32/64 (checked below).
        y_idx_type = node.inputs[2].type.dtype_specs()[1]
        return """

        if ((%(dnll)s->descr->type_num != PyArray_DOUBLE)
            || (%(sm)s->descr->type_num != PyArray_DOUBLE)
            )
        {
            PyErr_SetString(PyExc_TypeError, "types should be float64, float64, int64");
            %(fail)s;
        }
        if ((%(y_idx)s->descr->type_num != PyArray_INT64)
            && (%(y_idx)s->descr->type_num != PyArray_INT32)
            && (%(y_idx)s->descr->type_num != PyArray_INT16)
            && (%(y_idx)s->descr->type_num != PyArray_INT8))
        {
            PyErr_SetString(PyExc_TypeError, "y_idx not int8, int16, int32, or int64");
            %(fail)s;
        }
        if ((%(dnll)s->nd != 1)
            || (%(sm)s->nd != 2)
            || (%(y_idx)s->nd != 1))
        {
            PyErr_SetString(PyExc_ValueError, "rank error");
            %(fail)s;
        }
        if ((%(dnll)s->dimensions[0] != %(sm)s->dimensions[0])
            || (%(dnll)s->dimensions[0] != %(y_idx)s->dimensions[0]))
        {
            PyErr_SetString(PyExc_ValueError, "dimension mismatch");
            %(fail)s;
        }
        if ((NULL == %(dx)s)
            || (%(dx)s->dimensions[0] != %(sm)s->dimensions[0])
            || (%(dx)s->dimensions[1] != %(sm)s->dimensions[1]))
        {
            if (NULL != %(dx)s) Py_XDECREF(%(dx)s);
            %(dx)s = (PyArrayObject*)PyArray_SimpleNew(2, PyArray_DIMS(%(sm)s), type_num_%(sm)s);
            if(!%(dx)s) {
                PyErr_SetString(PyExc_MemoryError, "failed to alloc dx output");
                %(fail)s
            }
        }

        for (size_t i = 0; i < %(dx)s->dimensions[0]; ++i)
        {
            const double dnll_i = ((double*)(%(dnll)s->data + %(dnll)s->strides[0] * i))[0];

            const %(y_idx_type)s y_i = ((%(y_idx_type)s*)(%(y_idx)s->data + %(y_idx)s->strides[0] * i))[0];

            const double* __restrict__ sm_i = (double*)(%(sm)s->data + %(sm)s->strides[0] * i);
            npy_intp Ssm = %(sm)s->strides[1]/sizeof(double);

            double* __restrict__ dx_i = (double*)(%(dx)s->data + %(dx)s->strides[0] * i);
            npy_intp Sdx = %(dx)s->strides[1]/sizeof(double);

            for (size_t j = 0; j < %(dx)s->dimensions[1]; ++j)
            {
                dx_i[j * Sdx] = dnll_i * sm_i[j * Ssm];
            }
            if (y_i >= %(dx)s->dimensions[1])
            {
                %(fail)s;
            }
            dx_i[y_i * Sdx] -= dnll_i;
        }
        """ % dict(locals(), **sub)
69
8c2607f387e6 added softplus, elaborated sigmoid
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 68
diff changeset
378
70
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
def crossentropy_softmax_1hot(x, y_idx, **kwargs):
    """Bias-free wrapper around crossentropy_softmax_1hot_with_bias.

    Supplies a zero bias vector (shaped like one row of x) and forwards
    x, y_idx and any extra keyword arguments unchanged.
    """
    zero_bias = tensor.zeros_like(x[0,:])
    return crossentropy_softmax_1hot_with_bias(x, zero_bias, y_idx, **kwargs)
76e5c0f37165 better docs & precondition testing for cross_entropy_softmax_1hot & friends
James Bergstra <bergstrj@iro.umontreal.ca>
parents: 69
diff changeset
382