annotate algorithms/logistic_regression.py @ 507:b8e6de17eaa6

modifications to smallNorb
author James Bergstra <bergstrj@iro.umontreal.ca>
date Wed, 29 Oct 2008 18:06:49 -0400
parents c7ce66b4e8f4
children b267a8000f92
import theano
from theano import tensor as T
from theano.tensor import nnet
from theano.compile import module
from theano import printing, pprint
from theano import compile

import numpy as N

class LogRegInstanceType(module.FancyModuleInstance):
    def initialize(self, n_in, n_out=1, rng=N.random, seed=None):
        #self.component is the LogisticRegressionTemplate instance that built this guy.
        """
        @todo: Remove seed. Used only to keep Stacker happy.
        """

        self.w = N.zeros((n_in, n_out))
        self.b = N.zeros(n_out)
        self.lr = 0.01
        self.__hide__ = ['params']
        self.input_dimension = n_in
        self.output_dimension = n_out

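# For orientation: a minimal NumPy-only sketch (not part of the original file) of the
# state that LogRegInstanceType.initialize sets up, using hypothetical dimensions.
# The rng and seed arguments are accepted but unused by initialize itself.
def _sketch_initial_state(n_in=784, n_out=10):
    w = N.zeros((n_in, n_out))   # one weight column per output class
    b = N.zeros(n_out)           # one bias per output class
    lr = 0.01                    # default learning rate stored on the instance
    return w, b, lr
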
class Module_Nclass(module.FancyModule):
    InstanceType = LogRegInstanceType

    def __init__(self, x=None, targ=None, w=None, b=None, lr=None, regularize=False):
        super(Module_Nclass, self).__init__() #boilerplate

        self.x = x if x is not None else T.matrix('input')
        self.targ = targ if targ is not None else T.lvector()

        self.w = w if w is not None else module.Member(T.dmatrix())
        self.b = b if b is not None else module.Member(T.dvector())
        self.lr = lr if lr is not None else module.Member(T.dscalar())

        self.params = [p for p in [self.w, self.b] if p.owner is None]

        linear_output = T.dot(self.x, self.w) + self.b

        (xent, softmax, max_pr, argmax) = nnet.crossentropy_softmax_max_and_argmax_1hot(
                linear_output, self.targ)
        sum_xent = T.sum(xent)

        self.softmax = softmax
        self.argmax = argmax
        self.max_pr = max_pr
        self.sum_xent = sum_xent

        # Softmax being computed directly.
        softmax_unsupervised = nnet.softmax(linear_output)
        self.softmax_unsupervised = softmax_unsupervised

        #compatibility with current implementation of stacker/daa or something
        #TODO: remove this, make a wrapper
        self.cost = self.sum_xent
        self.input = self.x
        # TODO: I want to make output = linear_output.
        self.output = self.softmax_unsupervised

        #define the apply method
        self.pred = T.argmax(linear_output, axis=1)
        self.apply = module.Method([self.input], self.pred)

        self.validate = module.Method([self.input, self.targ], [self.cost, self.argmax, self.max_pr])
        self.softmax_output = module.Method([self.input], self.softmax_unsupervised)

        if self.params:
            gparams = T.grad(sum_xent, self.params)

            self.update = module.Method([self.input, self.targ], sum_xent,
                    updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))

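# For reference: a plain-NumPy sketch (illustration only, not the Theano op itself) of the
# quantities crossentropy_softmax_max_and_argmax_1hot is expected to return for a matrix
# of linear outputs and a vector of integer class targets. The update Method above then
# takes one gradient step p <- p - lr * d(sum(xent))/dp for each parameter p in [w, b].
def _sketch_softmax_xent(linear_output, targ):
    # row-wise softmax, computed stably by subtracting the per-row maximum
    e = N.exp(linear_output - linear_output.max(axis=1, keepdims=True))
    softmax = e / e.sum(axis=1, keepdims=True)
    # negative log-probability of the correct class, one value per example
    xent = -N.log(softmax[N.arange(len(targ)), targ])
    max_pr = softmax.max(axis=1)      # probability assigned to the predicted class
    argmax = softmax.argmax(axis=1)   # predicted class, as in self.pred
    return xent, softmax, max_pr, argmax
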
class Module(module.FancyModule):
    InstanceType = LogRegInstanceType

    def __init__(self, input=None, targ=None, w=None, b=None, lr=None, regularize=False):
        super(Module, self).__init__() #boilerplate

        self.input = input if input is not None else T.matrix('input')
        self.targ = targ if targ is not None else T.lcol()

        self.w = w if w is not None else module.Member(T.dmatrix())
        self.b = b if b is not None else module.Member(T.dvector())
        self.lr = lr if lr is not None else module.Member(T.dscalar())

        self.params = [p for p in [self.w, self.b] if p.owner is None]

        output = nnet.sigmoid(T.dot(self.input, self.w) + self.b)
        xent = -self.targ * T.log(output) - (1.0 - self.targ) * T.log(1.0 - output)
        sum_xent = T.sum(xent)

        self.output = output
        self.xent = xent
        self.sum_xent = sum_xent
        self.cost = sum_xent

        #define the apply method
        self.pred = (T.dot(self.input, self.w) + self.b) > 0.0
        self.apply = module.Method([self.input], self.pred)

        #if this module has any internal parameters, define an update function for them
        if self.params:
            gparams = T.grad(sum_xent, self.params)
            self.update = module.Method([self.input, self.targ], sum_xent,
                updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))

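# The binary Module writes the sigmoid cross-entropy out explicitly instead of using a
# fused op. A plain-NumPy sketch (illustration only, shapes and names assumed) of the
# same cost and of the thresholded prediction used by apply:
def _sketch_binary_logreg(x, w, b, targ):
    output = 1.0 / (1.0 + N.exp(-(N.dot(x, w) + b)))                    # sigmoid activation
    xent = -targ * N.log(output) - (1.0 - targ) * N.log(1.0 - output)   # per-element cross-entropy
    cost = xent.sum()                   # the sum_xent that the update Method minimizes
    pred = (N.dot(x, w) + b) > 0.0      # apply: threshold the linear output at zero
    return cost, pred
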
class Learner(object):
    """TODO: Encapsulate the algorithm for finding an optimal regularization coefficient"""
    pass

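# The Learner stub above leaves the regularization search unspecified. One plausible shape
# for it (hypothetical sketch; train_and_validate is an assumed helper that trains a model
# with the given coefficient and returns its validation cost) is a simple grid search:
def _sketch_pick_regularization_coefficient(train_and_validate,
                                            candidates=(0.0, 1e-4, 1e-3, 1e-2, 1e-1)):
    best_coef, best_cost = None, float('inf')
    for coef in candidates:
        cost = train_and_validate(coef)
        if cost < best_cost:
            best_coef, best_cost = coef, cost
    return best_coef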