comparison algorithms/logistic_regression.py @ 499:a419edf4e06c

removed unpicklable nested classes in logistic regression
author James Bergstra <bergstrj@iro.umontreal.ca>
date Tue, 28 Oct 2008 12:57:49 -0400
parents a272f4cbf004
children 4fb6f7320518
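
Context for the change: under Python 2, which this code targets, pickle serializes a class instance by recording the class's __module__ and __name__ and looking that name up at module scope when loading. A class defined inside another class is not reachable that way, so instances of the nested InstanceType classes below could not be pickled. A minimal self-contained sketch (hypothetical names, not from this file):

    import pickle

    class Outer(object):
        class Inner(object):    # nested: pickle can't locate it by name
            pass

    class Toplevel(object):     # module-level: pickle finds it fine
        pass

    try:
        pickle.dumps(Outer.Inner())
    except pickle.PicklingError as e:
        # Python 2 raises: Can't pickle <class '__main__.Inner'>:
        # it's not found as __main__.Inner
        # (Python 3.4+ records __qualname__ and succeeds instead.)
        print(e)

    # Hoisting the class to module scope makes instances round-trip:
    assert isinstance(pickle.loads(pickle.dumps(Toplevel())), Toplevel)

The changeset below applies exactly that fix: both nested InstanceType classes are replaced by one module-level LogRegInstanceType.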
diff -r 2be795cc5c3a -r a419edf4e06c algorithms/logistic_regression.py
--- a/algorithms/logistic_regression.py
+++ b/algorithms/logistic_regression.py
@@ -5,40 +5,45 @@
 from theano import printing, pprint
 from theano import compile
 
 import numpy as N
 
+class LogRegInstanceType(module.FancyModuleInstance):
+    def initialize(self, n_in, n_out=1, rng=N.random):
+        #self.component is the LogisticRegressionTemplate instance that built this guy.
+
+        self.w = N.zeros((n_in, n_out))
+        self.b = N.zeros(n_out)
+        self.lr = 0.01
+        self.__hide__ = ['params']
 
 class Module_Nclass(module.FancyModule):
-    class InstanceType(module.FancyModuleInstance):
-        def initialize(self, n_in, n_out, rng=N.random):
-            #self.component is the LogisticRegressionTemplate instance that built this guy.
-
-            self.w = N.zeros((n_in, n_out))
-            self.b = N.zeros(n_out)
-            self.lr = 0.01
-            self.__hide__ = ['params']
-
-    def __init__(self, input=None, targ=None, w=None, b=None, lr=None, regularize=False):
+    InstanceType = LogRegInstanceType
+
+    def __init__(self, x=None, targ=None, w=None, b=None, lr=None, regularize=False):
         super(Module_Nclass, self).__init__() #boilerplate
 
-        self.input = input if input is not None else T.matrix('input')
+        self.x = x if x is not None else T.matrix('input')
         self.targ = targ if targ is not None else T.lvector()
 
         self.w = w if w is not None else module.Member(T.dmatrix())
         self.b = b if b is not None else module.Member(T.dvector())
         self.lr = lr if lr is not None else module.Member(T.dscalar())
 
         self.params = [p for p in [self.w, self.b] if p.owner is None]
 
         xent, output = nnet.crossentropy_softmax_1hot(
-                T.dot(self.input, self.w) + self.b, self.targ)
+                T.dot(self.x, self.w) + self.b, self.targ)
         sum_xent = T.sum(xent)
 
         self.output = output
         self.sum_xent = sum_xent
+
+        #compatibility with current implementation of stacker/daa or something
+        #TODO: remove this, make a wrapper
         self.cost = sum_xent
+        self.input = self.x
 
         #define the apply method
         self.pred = T.argmax(T.dot(self.input, self.w) + self.b, axis=1)
         self.apply = module.Method([self.input], self.pred)
 
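
A note on the call kept by this hunk: nnet.crossentropy_softmax_1hot takes the activations T.dot(self.x, self.w) + self.b plus a vector of integer class targets, and returns the per-example cross-entropy together with the softmax output. Roughly the following math, shown in plain numpy (an illustrative sketch with a made-up function name and test values, not Theano's implementation):

    import numpy as N

    def crossentropy_softmax_1hot_np(act, targ):
        """act: (n_examples, n_out) activations; targ: one int class per row."""
        act = act - act.max(axis=1)[:, None]    # stabilize exp against overflow
        e = N.exp(act)
        softmax = e / e.sum(axis=1)[:, None]
        # cross-entropy of the true ("1-hot") class: -log softmax[i, targ[i]]
        xent = -N.log(softmax[N.arange(act.shape[0]), targ])
        return xent, softmax

    xent, output = crossentropy_softmax_1hot_np(
            N.array([[1.0, 2.0], [0.5, -0.5]]), N.array([1, 0]))

sum_xent = T.sum(xent) then reduces that vector to the scalar training cost.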
@@ -47,18 +52,11 @@
 
         self.update = module.Method([self.input, self.targ], sum_xent,
                 updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))
 
 class Module(module.FancyModule):
-    class InstanceType(module.FancyModuleInstance):
-        def initialize(self, n_in):
-            #self.component is the LogisticRegressionTemplate instance that built this guy.
-
-            self.w = N.random.randn(n_in,1)
-            self.b = N.random.randn(1)
-            self.lr = 0.01
-            self.__hide__ = ['params']
+    InstanceType = LogRegInstanceType
 
     def __init__(self, input=None, targ=None, w=None, b=None, lr=None, regularize=False):
         super(Module, self).__init__() #boilerplate
 
         self.input = input if input is not None else T.matrix('input')
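
Both classes build their update method from the same expression: updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)) maps each parameter to itself minus the learning rate times its gradient, i.e. one step of plain gradient descent on sum_xent. The same step in bare numpy (a sketch with arbitrary shapes and pretend gradients):

    import numpy as N

    lr = 0.01
    w, b = N.zeros((3, 2)), N.zeros(2)      # parameters, as in initialize()
    gw, gb = N.ones((3, 2)), N.ones(2)      # stand-ins for T.grad results

    # mirrors dict((p, p - lr * g) for p, g in zip(params, gparams))
    for p, g in ((w, gw), (b, gb)):
        p -= lr * g                         # update each parameter in place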
@@ -87,11 +85,9 @@
         if self.params:
             gparams = T.grad(sum_xent, self.params)
             self.update = module.Method([self.input, self.targ], sum_xent,
                     updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))
 
-
-
 class Learner(object):
     """TODO: Encapsulate the algorithm for finding an optimal regularization coefficient"""
     pass
 
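
Reduced to a toy, the whole changeset is this pattern: hoist the shared instance type to module scope, point both model classes at it through a plain class attribute, and fold the two initialize variants into one via the n_out=1 default (note this also switches the binary Module from randn to zeros initialization). A standalone sketch with hypothetical names:

    import pickle

    class LogRegInstance(object):             # module-level, hence picklable
        def initialize(self, n_in, n_out=1):  # n_out=1 covers the binary case
            self.w = [[0.0] * n_out for _ in range(n_in)]
            self.b = [0.0] * n_out
            self.lr = 0.01

    class ModelNclass(object):
        InstanceType = LogRegInstance         # shared, as in the diff

    class Model(object):
        InstanceType = LogRegInstance         # same object, no duplicate code

    inst = ModelNclass.InstanceType()
    inst.initialize(n_in=3, n_out=5)
    copy = pickle.loads(pickle.dumps(inst))   # now round-trips cleanly
    assert copy.w == inst.w and copy.lr == inst.lr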