diff dataset.py @ 273:fa8abc813bd2
Automated merge with ssh://projects@lgcm.iro.umontreal.ca/hg/pylearn
author    Frederic Bastien <bastienf@iro.umontreal.ca>
date      Thu, 05 Jun 2008 11:47:44 -0400
parents   fdce496c3b56 6226ebafefc3
children  ed70580f2324
--- a/dataset.py	Wed Jun 04 19:04:40 2008 -0400
+++ b/dataset.py	Thu Jun 05 11:47:44 2008 -0400
@@ -1051,32 +1051,30 @@
         return self.__dict__[key]
 
     def __iter__(self):
-        class ArrayDataSetIterator2(object):
-            def __init__(self,dataset,fieldnames,minibatch_size,n_batches,offset):
+        class ArrayDataSetIteratorIter(object):
+            def __init__(self,dataset,fieldnames):
                 if fieldnames is None: fieldnames = dataset.fieldNames()
                 # store the resulting minibatch in a lookup-list of values
                 self.minibatch = LookupList(fieldnames,[0]*len(fieldnames))
                 self.dataset=dataset
-                self.minibatch_size=minibatch_size
-                assert offset>=0 and offset<len(dataset.data)
-                assert offset+minibatch_size<=len(dataset.data)
-                self.current=offset
+                self.current=0
                 self.columns = [self.dataset.fields_columns[f] for f in self.minibatch._names]
+                self.l = self.dataset.data.shape[0]
             def __iter__(self):
                 return self
             def next(self):
                 #@todo: we suppose that we need to stop only when minibatch_size == 1.
                 # Otherwise, MinibatchWrapAroundIterator do it.
-                if self.current>=self.dataset.data.shape[0]:
+                if self.current>=self.l:
                     raise StopIteration
                 sub_data = self.dataset.data[self.current]
                 self.minibatch._values = [sub_data[c] for c in self.columns]
-                self.current+=self.minibatch_size
+                self.current+=1
                 return self.minibatch
 
-        return ArrayDataSetIterator2(self,self.fieldNames(),1,0,0)
+        return ArrayDataSetIteratorIter(self,self.fieldNames())
 
     def minibatches_nowrap(self,fieldnames,minibatch_size,n_batches,offset):
         class ArrayDataSetIterator(object):
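For context: the patch replaces the minibatch-parameterized inner iterator with a strictly per-example one. __iter__ now walks the underlying array one row at a time, and the row count is cached once in self.l rather than re-read from data.shape[0] on every next() call. Below is a minimal, self-contained sketch of that iteration pattern under stated assumptions; LookupList and the surrounding dataset class are not reproduced, and the names ExampleIterator and fields_columns are hypothetical stand-ins, not pylearn's API.

import numpy

class ExampleIterator(object):
    # Minimal sketch (hypothetical names) of the per-example iteration
    # pattern the patch introduces: walk a 2-D array row by row and
    # expose the requested columns of each row.
    def __init__(self, data, fields_columns, fieldnames):
        self.data = data
        self.columns = [fields_columns[f] for f in fieldnames]
        self.current = 0
        # Cache the number of rows once, as the patch does with self.l,
        # instead of re-reading data.shape[0] on every next() call.
        self.l = data.shape[0]

    def __iter__(self):
        return self

    def next(self):  # Python 2 iterator protocol, matching the patched code
        if self.current >= self.l:
            raise StopIteration
        row = self.data[self.current]
        self.current += 1
        return [row[c] for c in self.columns]

# Usage: 4 examples with fields 'x' (first two columns) and 'y' (the last).
data = numpy.arange(12).reshape(4, 3)
fields_columns = {'x': slice(0, 2), 'y': 2}
for example in ExampleIterator(data, fields_columns, ['x', 'y']):
    print example

Note that the real code returns the same LookupList object on every call, rebinding its _values each time, so a consumer that wants to keep an example past one iteration step would need to copy it.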