pylearn: comparison of mlp_factory_approach.py @ 304:6ead65d30f1e
while learning using __call__, we can now set the early stopper
author:    Thierry Bertin-Mahieux <bertinmt@iro.umontreal.ca>
date:      Tue, 10 Jun 2008 17:16:49 -0400
parents:   eded3cb54930
children:  93280a0c151a
comparing 303:410a6ef674ed (parent) with 304:6ead65d30f1e (this changeset)

--- a/mlp_factory_approach.py	303:410a6ef674ed
+++ b/mlp_factory_approach.py	304:6ead65d30f1e
@@ -3,11 +3,11 @@
 
 import theano
 from theano import tensor as T
 
 import dataset, nnet_ops, stopper, filetensor
-from lookup_list import LookupList
+from pylearn.lookup_list import LookupList
 
 
 class AbstractFunction (Exception): pass
 
 class AutoName(object):
@@ -121,11 +121,11 @@
 
     def linker(self):
         return theano.gof.PerformLinker()
 
     def early_stopper(self):
-        stopper.NStages(10,1)
+        stopper.NStages(300,1)
 
     def train_iter(self, trainset):
         raise AbstractFunction
     optimizer = Opt()
 
@@ -144,16 +144,17 @@
         # prefer caching in Model.__call__
         return theano.function(inputs, outputs,
                 unpack_single=False,
                 optimizer=self.graph.optimizer,
                 linker=self.graph.linker() if hasattr(self.graph, 'linker')
-                else 'c&py')
+                else 'c|py')
 
     def __call__(self,
             trainset=None,
             validset=None,
-            iparams=None):
+            iparams=None,
+            stp=None):
         """Allocate and optionally train a model
 
         @param trainset: Data for minimizing the cost function
         @type trainset: None or Dataset
 
@@ -163,10 +164,13 @@
         @param input: name of field to use as input
         @type input: string
 
         @param target: name of field to use as target
         @type target: string
+
+        @param stp: early stopper, if None use default in graphMLP.G
+        @type stp: None or early stopper
 
         @return: model
         @rtype: GraphLearner.Model instance
 
         """
@@ -182,21 +186,27 @@
         curmodel = GraphLearner.Model(self, iparams)
         best = curmodel
 
         if trainset is not None:
             #do some training by calling Model.update_minibatch()
-            stp = self.graph.early_stopper()
-            for mb in self.graph.train_iter(trainset):
-                curmodel.update_minibatch(mb)
-                if stp.set_score:
-                    if validset:
-                        stp.score = curmodel(validset, ['validset_score'])
-                        if (stp.score < stp.best_score):
-                            best = copy.copy(curmodel)
-                    else:
-                        stp.score = 0.0
-                stp.next()
+            if stp == None :
+                stp = self.graph.early_stopper()
+            try :
+                countiter = 0
+                for mb in self.graph.train_iter(trainset):
+                    curmodel.update_minibatch(mb)
+                    if stp.set_score:
+                        if validset:
+                            stp.score = curmodel(validset, ['validset_score'])
+                            if (stp.score < stp.best_score):
+                                best = copy.copy(curmodel)
+                        else:
+                            stp.score = 0.0
+                    countiter +=1
+                    stp.next()
+            except StopIteration :
+                print 'Iterations stopped after ', countiter,' iterations'
         if validset:
             curmodel = best
         return curmodel
 
 
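Note: the rewritten training loop above drives the early stopper through a small implicit protocol: next() raises StopIteration when training should end, set_score asks the loop to report a validation score, and best_score tracks the best score seen so far. As a reading aid, here is a minimal sketch of an NStages-like stopper that satisfies those calls; it is inferred from the loop for illustration, not the actual pylearn stopper.NStages implementation:

    class NStagesSketch(object):
        """Illustrative stand-in for stopper.NStages(hard_limit, v_int)."""
        def __init__(self, hard_limit, v_int):
            self.hard_limit = hard_limit   # total number of stages to run
            self.v_int = v_int             # request a score every v_int stages
            self.iter = 0
            self.score = None
            self.best_score = float('inf')

        @property
        def set_score(self):
            # the training loop assigns self.score when this is true
            return (self.iter % self.v_int) == 0

        def next(self):
            # fold in the score the loop just reported, then advance or stop
            if self.score is not None and self.score < self.best_score:
                self.best_score = self.score
            self.iter += 1
            if self.iter >= self.hard_limit:
                raise StopIteration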
@@ -276,12 +286,13 @@
                 , randsmall(nhid, nclass)
                 , randsmall(nclass)]
 
         def train_iter(self, trainset):
             return trainset.minibatches(['input', 'target'],
-                    minibatch_size=min(len(trainset), 32), n_batches=300)
+                    minibatch_size=min(len(trainset), 32), n_batches=2000)
         def early_stopper(self):
+            """ overwrites GraphLearner.graph function """
             return stopper.NStages(300,1)
 
     return G()
 
 
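With the new stp keyword, a caller can substitute its own stopper for the graph's default NStages(300,1). A hypothetical usage sketch; graphMLP's constructor arguments, the train_data/valid_data Dataset objects, and the assumption that GraphLearner wraps the graph object are not shown in this changeset:

    import stopper
    # names come from this file; graphMLP's real signature is not shown,
    # so its arguments below are placeholders
    from mlp_factory_approach import GraphLearner, graphMLP

    learner = GraphLearner(graphMLP(ninputs=4, nhid=10, nclass=2))
    model = learner(trainset=train_data,          # train_data/valid_data: Datasets
                    validset=valid_data,
                    stp=stopper.NStages(50, 1))   # override the default NStages(300, 1)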