annotate linear_regression.py @ 404:8cc11ac97087

Debugging simple AA a bit
author Joseph Turian <turian@gmail.com>
date Thu, 10 Jul 2008 00:51:32 -0400
parents 273e5c03003e
children 5175c564e37a
rev   line source
132
f6505ec32dc3 Updated documentation slightly
Joseph Turian <turian@gmail.com>
parents: 118
diff changeset
1 """
f6505ec32dc3 Updated documentation slightly
Joseph Turian <turian@gmail.com>
parents: 118
diff changeset
2 Implementation of linear regression, with or without L2 regularization.
f6505ec32dc3 Updated documentation slightly
Joseph Turian <turian@gmail.com>
parents: 118
diff changeset
3 This is one of the simplest examples of L{learner}, and illustrates
f6505ec32dc3 Updated documentation slightly
Joseph Turian <turian@gmail.com>
parents: 118
diff changeset
4 the use of theano.
f6505ec32dc3 Updated documentation slightly
Joseph Turian <turian@gmail.com>
parents: 118
diff changeset
5 """
75
90e4c0784d6e Added draft of LinearRegression learner
bengioy@bengiomac.local
parents:
diff changeset
6
390
efb797c5efc0 First non-crashing draft of LinearRegression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 379
diff changeset
7 from pylearn.learner import OfflineLearningAlgorithm
376
c9a89be5cb0a Redesigning linear_regression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 132
diff changeset
8 from theano import tensor as T
390
efb797c5efc0 First non-crashing draft of LinearRegression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 379
diff changeset
9 from theano.others_ops import prepend_1_to_each_row
75
90e4c0784d6e Added draft of LinearRegression learner
bengioy@bengiomac.local
parents:
diff changeset
10 from theano.scalar import as_scalar
376
c9a89be5cb0a Redesigning linear_regression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 132
diff changeset
11 from common.autoname import AutoName
390
efb797c5efc0 First non-crashing draft of LinearRegression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 379
diff changeset
12 import theano
efb797c5efc0 First non-crashing draft of LinearRegression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 379
diff changeset
13 import numpy
75
90e4c0784d6e Added draft of LinearRegression learner
bengioy@bengiomac.local
parents:
diff changeset
14
376
c9a89be5cb0a Redesigning linear_regression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 132
diff changeset
class LinearRegression(OfflineLearningAlgorithm):
    """
    Implement linear regression, with or without L2 regularization
    (the former is called Ridge Regression and the latter Ordinary Least Squares).

    Usage:

       linear_regressor=LinearRegression(L2_regularizer=0.1)
       linear_predictor=linear_regressor(training_set)
       all_results_dataset=linear_predictor(test_set) # creates a dataset with "output" and "squared_error" field
       outputs = linear_predictor.compute_outputs(inputs) # inputs and outputs are numpy arrays
       outputs, errors = linear_predictor.compute_outputs_and_errors(inputs,targets)
       errors = linear_predictor.compute_errors(inputs,targets)
       mse = linear_predictor.compute_mse(inputs,targets)

    The training_set must have fields "input" and "target".
    The test_set must have field "input", and needs "target" if
    we want to compute the squared errors.

    The predictor parameters are obtained analytically from the training set.

    For each (input_t,target_t) pair in a minibatch,::

       output_t = b + W * input_t

    where b and W are obtained by minimizing::

       L2_regularizer sum_{ij} W_{ij}^2  + sum_t ||output_t - target_t||^2

    Let X be the whole training set inputs matrix (one input example per row),
    with the first column full of 1's, and Let Y the whole training set
    targets matrix (one example's target vector per row).
    Let theta = the matrix with b in its first column and W in the others,
    then each theta[:,i] is the solution of the linear system::

       XtX * theta[:,i] = XtY[:,i]

    where XtX is a (n_inputs+1)x(n_inputs+1) matrix containing X'*X
    plus L2_regularizer on the diagonal except at (0,0),
    and XtY is a (n_inputs+1)*n_outputs matrix containing X'*Y.

    The dataset fields expected and produced by the learning algorithm and the trained model
    are the following:

     - Input and output dataset fields (example-wise quantities):

       - 'input' (always expected as an input_dataset field)
       - 'target' (always expected by the learning algorithm, optional for learned model)
       - 'output' (always produced by learned model)
       - 'squared_error' (optionally produced by learned model if 'target' is provided)
         = example-wise squared error
    """
    def __init__(self, L2_regularizer=0, minibatch_size=10000):
        """
        :param L2_regularizer: weight of the L2 penalty on W (the bias b is
            not penalized); 0 gives Ordinary Least Squares.
        :param minibatch_size: upper bound on the number of examples fed to
            the sufficient-statistics update at once.
        """
        self.L2_regularizer = L2_regularizer
        self.equations = LinearRegressionEquations()
        # Bug fix: the constructor argument used to be ignored here (it was
        # unconditionally overwritten with the hard-coded value 1000).
        self.minibatch_size = minibatch_size

    def __call__(self, trainset):
        """
        Accumulate XtX and XtY over the training set in minibatches, solve the
        regularized normal equations, and return the resulting LinearPredictor.

        :param trainset: dataset with fields "input" and "target".
        """
        first_example = trainset[0]
        n_inputs = first_example['input'].size
        n_outputs = first_example['target'].size
        XtX = numpy.zeros((n_inputs+1, n_inputs+1))
        XtY = numpy.zeros((n_inputs+1, n_outputs))
        # Pre-load the L2 penalty on the diagonal, skipping entry (0,0)
        # so the bias term is not regularized.
        for i in xrange(n_inputs):
            XtX[i+1, i+1] = self.L2_regularizer
        mbs = min(self.minibatch_size, len(trainset))
        for inputs, targets in trainset.minibatches(["input", "target"], minibatch_size=mbs):
            # update() adds this minibatch's contribution to XtX and XtY.
            XtX, XtY = self.equations.update(XtX, XtY, numpy.array(inputs), numpy.array(targets))
        # Solve XtX * theta = XtY column-wise for all outputs at once.
        theta = numpy.linalg.solve(XtX, XtY)
        return LinearPredictor(theta)
75
90e4c0784d6e Added draft of LinearRegression learner
bengioy@bengiomac.local
parents:
diff changeset
92
376
c9a89be5cb0a Redesigning linear_regression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 132
diff changeset
class LinearPredictorEquations(AutoName):
    """
    Symbolic (theano) graph for the prediction side of linear regression.

    The class body builds the graph once at class-definition time (AutoName
    presumably names these variables after the attributes — TODO confirm);
    compile() then turns selected graph outputs into callable functions that
    are installed as class attributes.
    """
    inputs = T.matrix() # minibatchsize x n_inputs
    targets = T.matrix() # minibatchsize x n_outputs
    theta = T.matrix() # (n_inputs+1) x n_outputs
    # First row of theta is the bias vector, the rest is the (transposed) weight matrix.
    b = theta[0]
    Wt = theta[1:,:]
    outputs = T.dot(inputs,Wt) + b # minibatchsize x n_outputs
    # One squared-error value per example (sum over output dimensions).
    squared_errors = T.sum(T.sqr(targets-outputs),axis=1)

    # Name-mangled per class, so each subclass tracks its own compilation state.
    __compiled = False
    @classmethod
    def compile(cls,linker='c|py'):
        """Compile the graph into compute_outputs/compute_errors (idempotent)."""
        if cls.__compiled:
            return
        def fn(input_vars,output_vars):
            # staticmethod() keeps the compiled function from being bound to
            # instances when stored as a class attribute.
            return staticmethod(theano.function(input_vars,output_vars, linker=linker))

        cls.compute_outputs = fn([cls.inputs,cls.theta],[cls.outputs])
        cls.compute_errors = fn([cls.outputs,cls.targets],[cls.squared_errors])

        cls.__compiled = True

    def __init__(self):
        # Instantiating triggers (at most one) compilation for the class.
        self.compile()
c9a89be5cb0a Redesigning linear_regression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 132
diff changeset
117
c9a89be5cb0a Redesigning linear_regression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 132
diff changeset
class LinearRegressionEquations(LinearPredictorEquations):
    """
    Extends the predictor graph with the training-side update of the
    sufficient statistics XtX and XtY.
    """
    P = LinearPredictorEquations
    XtX = T.matrix() # (n_inputs+1) x (n_inputs+1)
    XtY = T.matrix() # (n_inputs+1) x n_outputs
    # Prepend a column of 1's so the bias is folded into theta's first row.
    extended_input = prepend_1_to_each_row(P.inputs)
    # NOTE: add_inplace mutates the XtX/XtY arrays passed to update().
    new_XtX = T.add_inplace(XtX,T.dot(extended_input.T,extended_input))
    new_XtY = T.add_inplace(XtY,T.dot(extended_input.T,P.targets))

    # Shadows the parent's flag (name mangling makes it per-class), so this
    # subclass compiles its own function independently.
    __compiled = False

    @classmethod
    def compile(cls,linker='c|py'):
        """Compile the graph into the update() statistics accumulator (idempotent)."""
        if cls.__compiled:
            return
        def fn(input_vars,output_vars):
            # staticmethod() keeps the compiled function unbound when stored
            # as a class attribute.
            return staticmethod(theano.function(input_vars,output_vars, linker=linker))

        # update(XtX, XtY, inputs, targets) -> [new_XtX, new_XtY], in place.
        cls.update = fn([cls.XtX,cls.XtY,cls.P.inputs,cls.P.targets],[cls.new_XtX,cls.new_XtY])

        cls.__compiled = True

    def __init__(self):
        # Instantiating triggers (at most one) compilation for the class.
        self.compile()
379
74b402b5a81b small modif by yoshue
Frederic Bastien <bastienf@iro.umontreal.ca>
parents: 376
diff changeset
141
376
c9a89be5cb0a Redesigning linear_regression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 132
diff changeset
class LinearPredictor(object):
    """
    A linear predictor has parameters theta (a bias vector and a weight matrix)
    it can use to make a linear prediction (according to the LinearPredictorEquations).
    It can compute its output (bias + weight * input) and a squared error (||output - target||^2).
    """
    def __init__(self, theta):
        # theta is (n_inputs+1) x n_outputs: row 0 is the bias, the rest the weights.
        self.theta = theta
        self.n_inputs = theta.shape[0] - 1
        self.n_outputs = theta.shape[1]
        self.equations = LinearPredictorEquations()

    def compute_outputs(self, inputs):
        """Return the linear predictions for a minibatch of inputs."""
        return self.equations.compute_outputs(inputs, self.theta)

    def compute_errors(self, inputs, targets):
        """Return the example-wise squared errors for (inputs, targets)."""
        outputs = self.compute_outputs(inputs)
        return self.equations.compute_errors(outputs, targets)

    def compute_outputs_and_errors(self, inputs, targets):
        """Return [outputs, squared_errors], computing the outputs only once."""
        outputs = self.compute_outputs(inputs)
        errors = self.equations.compute_errors(outputs, targets)
        return [outputs, errors]

    def compute_mse(self, inputs, targets):
        """Return the mean squared error over all examples and output dimensions."""
        errors = self.compute_errors(inputs, targets)
        return numpy.sum(errors) / errors.size

    def __call__(self, dataset, output_fieldnames=None, cached_output_dataset=False):
        """
        Apply the predictor to a dataset with an "input" field, producing a
        dataset with the requested output fields ("output" and/or
        "squared_error"; the latter requires a "target" field).
        """
        assert dataset.hasFields(["input"])
        if output_fieldnames is None:
            if dataset.hasFields(["target"]):
                output_fieldnames = ["output", "squared_error"]
            else:
                output_fieldnames = ["output"]
        # Sorted in place (matches the original behavior on caller-supplied lists)
        # so the dispatch below can rely on a canonical order.
        output_fieldnames.sort()
        dispatch = {
            ("squared_error",): self.compute_errors,
            ("output",): self.compute_outputs,
            ("output", "squared_error"): self.compute_outputs_and_errors,
        }
        key = tuple(output_fieldnames)
        if key not in dispatch:
            raise ValueError("unknown field(s) in output_fieldnames: "+str(output_fieldnames))
        f = dispatch[key]
        # NOTE(review): ApplyFunctionDataSet and CachedDataSet are not imported
        # anywhere visible in this file — presumably from pylearn's dataset
        # module; confirm, else this raises NameError at call time.
        ds = ApplyFunctionDataSet(dataset, f, output_fieldnames)
        if cached_output_dataset:
            return CachedDataSet(ds)
        return ds
c9a89be5cb0a Redesigning linear_regression
Yoshua Bengio <bengioy@iro.umontreal.ca>
parents: 132
diff changeset
187
77
1e2bb5bad636 toying with different ways to implement learners
bengioy@bengiomac.local
parents: 75
diff changeset
188