Mercurial > pylearn
diff algorithms/logistic_regression.py @ 495:7560817a07e8
nnet_ops => nnet
author   | Joseph Turian <turian@gmail.com>
date     | Tue, 28 Oct 2008 12:09:39 -0400
parents  | 180d125dc7e2
children | a272f4cbf004
line wrap: on
line diff
--- a/algorithms/logistic_regression.py	Tue Oct 28 11:40:56 2008 -0400
+++ b/algorithms/logistic_regression.py	Tue Oct 28 12:09:39 2008 -0400
@@ -1,6 +1,6 @@
 import theano
 from theano import tensor as T
-from theano.tensor import nnet_ops
+from theano.tensor import nnet
 from theano.compile import module
 from theano import printing, pprint
 from theano import compile
@@ -30,7 +30,7 @@
         self.params = [p for p in [self.w, self.b] if p.owner is None]

-        xent, y = nnet_ops.crossentropy_softmax_1hot(
+        xent, y = nnet.crossentropy_softmax_1hot(
                 T.dot(self.x, self.w) + self.b, self.targ)
         sum_xent = T.sum(xent)
@@ -70,7 +70,7 @@
         self.params = [p for p in [self.w, self.b] if p.owner is None]

-        y = nnet_ops.sigmoid(T.dot(self.x, self.w))
+        y = nnet.sigmoid(T.dot(self.x, self.w))
         xent = -self.targ * T.log(y) - (1.0 - self.targ) * T.log(1.0 - y)
         sum_xent = T.sum(xent)