algorithms/logistic_regression.py @ 470:bd937e845bbb

new stuff: algorithms/logistic_regression, datasets/MNIST
author James Bergstra <bergstrj@iro.umontreal.ca>
date Wed, 22 Oct 2008 15:56:53 -0400
parents
children 69c800af1370
import theano
from theano import tensor as T
from theano.tensor import nnet_ops
from theano.compile import module
from theano import printing, pprint
from theano import compile

import numpy as N


class Module_Nclass(module.FancyModule):
    class __instance_type__(module.FancyModuleInstance):
        def initialize(self, n_in, n_out, rng=N.random):
            # self.component is the LogisticRegressionTemplate instance
            # that built this instance.
            self.w = rng.randn(n_in, n_out)
            self.b = rng.randn(n_out)
            self.lr = 0.01
            self.__hide__ = ['params']

    def __init__(self, x=None, targ=None, w=None, b=None, lr=None):
        super(Module_Nclass, self).__init__()  # boilerplate

        self.x = x if x is not None else T.matrix()
        self.targ = targ if targ is not None else T.lvector()

        self.w = w if w is not None else module.Member(T.dmatrix())
        self.b = b if b is not None else module.Member(T.dvector())
        self.lr = lr if lr is not None else module.Member(T.dscalar())

        # only the variables this module owns (symbolic inputs with no
        # owner) are trained by the update method below
        self.params = [p for p in [self.w, self.b] if p.owner is None]

        xent, y = nnet_ops.crossentropy_softmax_1hot(
                T.dot(self.x, self.w) + self.b, self.targ)
        sum_xent = T.sum(xent)

        self.y = y
        self.sum_xent = sum_xent

        # define the apply method
        self.pred = T.argmax(T.dot(self.x, self.w) + self.b, axis=1)
        self.apply = module.Method([self.x], self.pred)

        # if this module owns any parameters, define an update method
        # that takes one SGD step on the summed cross-entropy
        if self.params:
            gparams = T.grad(sum_xent, self.params)

            self.update = module.Method([self.x, self.targ], sum_xent,
                    updates=dict((p, p - self.lr * g)
                                 for p, g in zip(self.params, gparams)))
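# A hedged, NumPy-only sketch of the rule that Module_Nclass.update compiles
# above: softmax cross-entropy over one-hot targets, followed by one SGD
# step.  The helper is illustrative only and not part of the module's API;
# it assumes X has shape (n_examples, n_in) and targ holds integer labels.
def _sgd_step_nclass_demo(w, b, X, targ, lr=0.01):
    act = N.dot(X, w) + b                       # linear activations
    act = act - act.max(axis=1)[:, None]        # stabilize the softmax
    p = N.exp(act)
    p = p / p.sum(axis=1)[:, None]              # softmax probabilities
    sum_xent = -N.sum(N.log(p[N.arange(len(targ)), targ]))
    d_act = p.copy()
    d_act[N.arange(len(targ)), targ] -= 1.0     # grad of sum_xent wrt activations
    w -= lr * N.dot(X.T, d_act)                 # same step as the updates dict above
    b -= lr * d_act.sum(axis=0)
    return sum_xent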
class Module(module.FancyModule):
    class __instance_type__(module.FancyModuleInstance):
        def initialize(self, n_in):
            # self.component is the LogisticRegressionTemplate instance
            # that built this instance.
            self.w = N.random.randn(n_in, 1)
            self.b = N.random.randn(1)
            self.lr = 0.01
            self.__hide__ = ['params']

    def __init__(self, x=None, targ=None, w=None, b=None, lr=None):
        super(Module, self).__init__()  # boilerplate

        self.x = x if x is not None else T.matrix()
        self.targ = targ if targ is not None else T.lcol()

        self.w = w if w is not None else module.Member(T.dmatrix())
        self.b = b if b is not None else module.Member(T.dvector())
        self.lr = lr if lr is not None else module.Member(T.dscalar())

        self.params = [p for p in [self.w, self.b] if p.owner is None]

        # include the bias in the training activation (as pred does below)
        # so that b appears in the graph and T.grad can differentiate
        # sum_xent with respect to it
        y = nnet_ops.sigmoid(T.dot(self.x, self.w) + self.b)
        xent = -self.targ * T.log(y) - (1.0 - self.targ) * T.log(1.0 - y)
        sum_xent = T.sum(xent)

        self.y = y
        self.xent = xent
        self.sum_xent = sum_xent

        # define the apply method: predict 1 when the activation is positive
        self.pred = (T.dot(self.x, self.w) + self.b) > 0.0
        self.apply = module.Method([self.x], self.pred)

        # if this module has any internal parameters, define an update
        # method that takes one SGD step on the summed cross-entropy
        if self.params:
            gparams = T.grad(sum_xent, self.params)
            self.update = module.Method([self.x, self.targ], sum_xent,
                    updates=dict((p, p - self.lr * g)
                                 for p, g in zip(self.params, gparams)))
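# Hedged, NumPy-only sketch of the binary update above (illustrative only,
# not part of the module's API): with y = sigmoid(X w + b), the gradient of
# sum_xent with respect to the activations is simply (y - targ).
def _sgd_step_binary_demo(w, b, X, targ, lr=0.01):
    y = 1.0 / (1.0 + N.exp(-(N.dot(X, w) + b)))    # sigmoid, shape (n, 1)
    sum_xent = -N.sum(targ * N.log(y) + (1.0 - targ) * N.log(1.0 - y))
    d_act = y - targ                               # grad of sum_xent wrt activations
    w -= lr * N.dot(X.T, d_act)
    b -= lr * d_act.sum(axis=0)
    return sum_xent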
class Learner(object):
    """TODO: Encapsulate the algorithm for finding an optimal regularization coefficient."""
    pass
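# Hedged sketch of what Learner might encapsulate: choose the regularization
# coefficient with the best held-out score.  The callables and the default
# grid below are hypothetical; the TODO above does not fix an interface.
def _select_coefficient_demo(train_fn, valid_score_fn,
                             coefficients=(0.0, 1e-4, 1e-2, 1.0)):
    best = None
    for c in coefficients:
        model = train_fn(c)                  # train a model with penalty c
        score = valid_score_fn(model)        # validation score, higher is better
        if best is None or score > best[0]:
            best = (score, c, model)
    return best                              # (best score, coefficient, model)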