view datasets/dataset.py @ 166:17ae5a1a4dd1

Moving the convolutional MLP code into baseline
author Dumitru Erhan <dumitru.erhan@gmail.com>
date Fri, 26 Feb 2010 14:03:24 -0500
parents 4b28d7382dbf
children d6672a7daea5

from dsetiter import DataIterator

class DataSet(object):
    def test(self, batchsize, bufsize=None): 
        r"""
        Returns an iterator over the test examples.

        Parameters
          batchsize (int) -- the size of the minibatches, 0 means
                             return the whole set at once.
          bufsize (int, optional) -- the size of the in-memory buffer,
                                     0 to disable.
        """
        return self._return_it(batchsize, bufsize, self._test)

    def train(self, batchsize, bufsize=None):
        r"""
        Returns an iterator over the training examples.

        Parameters
          batchsize (int) -- the size of the minibatches, 0 means
                             return the whole set at once.
          bufsize (int, optional) -- the size of the in-memory buffer,
                                     0 to disable.
        """
        return self._return_it(batchsize, bufsize, self._train)

    def valid(self, batchsize, bufsize=None):
        r"""
        Returns an iterator over the validation examples.

        Parameters
          batchsize (int) -- the size of the minibatches, 0 means
                             return the whole set at once.
          bufsize (int, optional) -- the size of the in-memory buffer,
                                     0 to disable.
        """
        return self._return_it(batchsize, bufsize, self._valid)

    def _return_it(self, batchsize, bufsize, data):
        r"""
        Must return an iterator over the specified dataset (`data`).

        Implement this in subclasses.
        """
        raise NotImplementedError
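
# --- Illustrative sketch, not part of the original module ---
# A minimal subclass showing one way _return_it could be implemented,
# assuming the splits are already in memory as plain Python sequences.
# The name ListDataSet, the list-based storage, and the slicing logic
# below are assumptions for illustration only; they are not the
# project's actual DataIterator-based API.
class ListDataSet(DataSet):
    def __init__(self, train, valid, test):
        # Store the splits under the attribute names the base class
        # passes to _return_it (self._train, self._valid, self._test).
        self._train = train
        self._valid = valid
        self._test = test

    def _return_it(self, batchsize, bufsize, data):
        # bufsize is ignored here because the data is already in memory.
        if batchsize == 0:
            # 0 means "return the whole set at once".
            return iter([data])
        # Otherwise yield consecutive minibatches of size batchsize.
        return (data[i:i + batchsize]
                for i in range(0, len(data), batchsize))

# Example usage of the iterator interface:
#   dset = ListDataSet(train=range(10), valid=range(4), test=range(4))
#   for minibatch in dset.train(batchsize=3):
#       ...  # process one minibatch at a time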