pylearn: changeset 211:bd728c83faff
In __get__ there was a problem when i.stop was None (i being the slice); added one line replacing None with len(self).
author:   Thierry Bertin-Mahieux <bertinmt@iro.umontreal.ca>
date:     Wed, 21 May 2008 17:39:30 -0400
parents:  ffd50efefb70
children: 9b57ea8c767f
files:    dataset.py denoising_aa.py learner.py mlp_factory_approach.py stopper.py
diffstat: 5 files changed, 20 insertions(+), 4 deletions(-)
--- a/dataset.py	Sat May 17 00:01:47 2008 -0400
+++ b/dataset.py	Wed May 21 17:39:30 2008 -0400
@@ -442,7 +442,9 @@
         rows=None
         # or a slice
         if type(i) is slice:
+            #print 'i=',i
             if not i.start: i=slice(0,i.stop,i.step)
+            if not i.stop: i=slice(i.start,len(self),i.step)
             if not i.step: i=slice(i.start,i.stop,1)
             if i.step is 1:
                 return self.minibatches(minibatch_size=i.stop-i.start,n_batches=1,offset=i.start).next().examples()
@@ -662,6 +664,7 @@
         and a values_hstack(fieldnames,fieldvalues) functions behaving with the
         same semantics as the DataSet methods of the same name (but without the
         self argument).
         """
+        self._fields=fields_lookuplist
         assert len(fields_lookuplist)>0
         self.length=len(fields_lookuplist[0])
@@ -1140,6 +1143,9 @@
         Note that the expected semantics of the function differs in minibatch mode
         (it takes minibatches of inputs and produces minibatches of outputs, as
         documented in the class comment).
+
+        TBM: are filedtypes the old field types (from input_dataset) or the new ones
+        (for the new dataset created)?
         """
         self.input_dataset=input_dataset
         self.function=function
@@ -1182,6 +1188,7 @@
                     return all_outputs
                 return Example(fieldnames,[all_outputs[name] for name in fieldnames])

+        return ApplyFunctionIterator(self)

     def __iter__(self): # only implemented for increased efficiency
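The first dataset.py hunk is the fix named in the commit message: a slice such as ds[2:] arrives with i.stop equal to None, and arithmetic like i.stop-i.start then fails. Below is a minimal standalone sketch of that normalization pattern, not the pylearn code itself; the class name and list-backed storage are hypothetical, and it tests `is not None` where the patch uses a truthiness check (which also rewrites a legitimate start of 0, harmlessly).

```python
# Hypothetical minimal dataset illustrating the slice-normalization fix:
# start=None -> 0, stop=None -> len(self), step=None -> 1.
class MinimalDataSet(object):
    def __init__(self, examples):
        self._examples = list(examples)

    def __len__(self):
        return len(self._examples)

    def __getitem__(self, i):
        if isinstance(i, slice):
            # The patched line corresponds to the 'stop' case below: without
            # it, i.stop - i.start raised a TypeError for slices like ds[2:].
            start = i.start if i.start is not None else 0
            stop = i.stop if i.stop is not None else len(self)
            step = i.step if i.step is not None else 1
            return self._examples[start:stop:step]
        return self._examples[i]

ds = MinimalDataSet(range(5))
assert ds[2:] == [2, 3, 4]   # stop=None now maps to len(ds)
assert ds[::2] == [0, 2, 4]  # start and step default sensibly too
```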
--- a/denoising_aa.py	Sat May 17 00:01:47 2008 -0400
+++ b/denoising_aa.py	Wed May 21 17:39:30 2008 -0400
@@ -104,6 +104,7 @@
     def __call__(self, training_set=None):
         model = DenoisingAutoEncoderModel(self)
         if training_set:
+            print 'what do I do if training set????'

     def compile(self, inputs, outputs):
         return theano.function(inputs,outputs,unpack_single=False,linker=self.linker)
@@ -118,6 +119,7 @@

     def update(self, training_set, train_stats_collector=None):
+        print 'dont update you crazy frog!'
         # old stuff
--- a/learner.py	Sat May 17 00:01:47 2008 -0400
+++ b/learner.py	Wed May 21 17:39:30 2008 -0400
@@ -1,3 +1,4 @@
+
 from exceptions import *
 from dataset import AttributesHolder
--- a/mlp_factory_approach.py	Sat May 17 00:01:47 2008 -0400
+++ b/mlp_factory_approach.py	Wed May 21 17:39:30 2008 -0400
@@ -6,6 +6,7 @@
 from tlearn import dataset, nnet_ops, stopper

+
 def _randshape(*shape):
     return (numpy.random.rand(*shape) -0.5) * 0.001
@@ -31,7 +32,8 @@
             params = self.params
             #TODO: why should we have to unpack target like this?
             for input, target in input_target:
-                self.update_fn(input, target[:,0], *params)
+                rval= self.update_fn(input, target[:,0], *params)
+                print rval[0]

     def __call__(self, testset, fieldnames=['output_class']):
         """Apply this model (as a function) to new data"""
@@ -102,7 +104,7 @@
             # prefer caching in _Model.__call__
             return theano.function(inputs, outputs, unpack_single=False, linker=self.linker)

-    def __call__(self, trainset=None, iparams=None):
+    def __call__(self, trainset=None, iparams=None, input='input', target='target'):
         """Allocate and optionally train a model"""
         if iparams is None:
             iparams = [_randshape(self.nhid, self.nclass), _randshape(self.nclass)]\
@@ -119,8 +121,9 @@
             best = rval
             for stp in self.early_stopper():
                 rval.update(
-                    trainset.minibatches(['input', 'target'], minibatch_size=min(32,
+                    minset.minibatches([input, target], minibatch_size=min(32,
                         len(trainset))))
+                print 'mlp.__call__(), we did an update'
                 if stp.set_score:
                     stp.score = rval(valset, ['loss_01'])
                     if (stp.score < stp.best_score):
@@ -154,7 +157,7 @@
             , linker='c&py'
             , early_stopper = lambda:stopper.NStages(100,1))

-    model1 = learn_algo(training_set1)
+    model1 = learn_algo(training_set1,input='input',target='target')

     model2 = learn_algo(training_set2)
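The signature change to __call__ above addresses hard-coded field names: training used to fetch minibatches with the literals 'input' and 'target', so a dataset whose fields were named differently could not be trained on. A hedged sketch of the idea, with a hypothetical ToyDataSet standing in for pylearn's dataset class:

```python
# ToyDataSet is a made-up stand-in: field name -> list of rows.
class ToyDataSet(object):
    def __init__(self, fields):
        self.fields = fields

    def minibatches(self, fieldnames, minibatch_size):
        n = len(self.fields[fieldnames[0]])
        for i in range(0, n, minibatch_size):
            yield [self.fields[name][i:i + minibatch_size] for name in fieldnames]

def run_updates(trainset, input='input', target='target'):
    # Mirrors the patched loop: field names come from the caller, not literals.
    for inp, tgt in trainset.minibatches([input, target], minibatch_size=2):
        print(inp, tgt)  # a real learner would feed these to update_fn

ds = ToyDataSet({'x': [1, 2, 3, 4], 'y': [0, 1, 0, 1]})
run_updates(ds, input='x', target='y')  # works despite non-default field names
```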
--- a/stopper.py	Sat May 17 00:01:47 2008 -0400
+++ b/stopper.py	Wed May 21 17:39:30 2008 -0400
@@ -75,6 +75,9 @@

     E_set_score = 'when iter.set_score is True, caller must assign a score to iter.score'
     def next(self):
+
+        print 'ICML08 stopper, were doing a next'
+
         if self.set_score: #left over from last time
             if self.score is None:
                 raise Exception(ICML08Stopper.E_set_score)
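The stopper.py hunk only adds a trace print, but the error string above it documents a protocol that is easy to miss: the training loop iterates over the stopper, and whenever stp.set_score is True the caller must write a validation score into stp.score before asking for the next step (as the loop in mlp_factory_approach.py does). NStagesSketch below is a simplified, hypothetical analogue of stopper.NStages, not the real class:

```python
class NStagesSketch(object):
    def __init__(self, hard_limit, v_int):
        self.iter = 0
        self.hard_limit = hard_limit  # stop unconditionally after this many steps
        self.v_int = v_int            # request a validation score every v_int steps
        self.set_score = False
        self.score = None
        self.best_score = float('inf')

    def __iter__(self):
        return self

    def __next__(self):
        # 'left over from last time': verify the caller honored the protocol.
        if self.set_score and self.score is None:
            raise Exception('when set_score is True, caller must assign a score')
        if self.iter >= self.hard_limit:
            raise StopIteration
        self.iter += 1
        self.set_score = (self.iter % self.v_int == 0)
        self.score = None
        return self

    next = __next__  # Python 2 spelling, matching the era of the patch

# Usage, mirroring the loop in mlp_factory_approach.py:
#   for stp in NStagesSketch(100, 1):
#       ...do one training update...
#       if stp.set_score:
#           stp.score = compute_validation_loss()   # hypothetical helper
#           if stp.score < stp.best_score:
#               stp.best_score = stp.score
```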