changeset 849:cfdaa56c66e8

added function documentation to lecun layer
author James Bergstra <bergstrj@iro.umontreal.ca>
date Mon, 26 Oct 2009 15:35:38 -0400
parents 77e6b2d3e5e5
children ab7d598f5579 bf2f71084d59
files pylearn/shared/layers/lecun1998.py
diffstat 1 files changed, 32 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- a/pylearn/shared/layers/lecun1998.py	Thu Oct 22 19:10:15 2009 -0400
+++ b/pylearn/shared/layers/lecun1998.py	Mon Oct 26 15:35:38 2009 -0400
@@ -24,6 +24,16 @@
     #    - more?
 
     def __init__(self, input, w, b, conv_op, ds_op, squash_op, params):
+        """
+        :param input: symbolic images.  Shape: (n_examples, n_images, n_rows, n_cols)
+        :param w: symbolic kernels. Shape: (n_kernels, n_images, filter_height, filter_width)
+        :param b: symbolic biases. Shape: (n_kernels,)
+        :param conv_op: Typically, an instantiation of ConvOp.
+        :param ds_op: A downsampling op instance (such as of DownsampleFactorMax)
+        :param squash_op: an elemwise squashing function (typically tanh)
+        :param params: a list of shared variables that parametrize this layer (typically
+            w and b)
+        """
         if input.ndim != 4:
             raise TypeError(input)
         if w.ndim != 4:
@@ -40,6 +50,28 @@
             ignore_border=True, conv_subsample=(1,1), dtype=None, conv_mode='valid',
             pool_type='max', squash_fn=tensor.tanh):
         """
+        Allocate a LeNetConvPool layer with shared variable internal parameters.
+
+        :param rng: a random number generator used to initialize weights
+        :param input: symbolic images.  Shape: (n_examples, n_imgs, img_shape[0], img_shape[1])
+        :param n_examples: input's shape[0] at runtime
+        :param n_imgs: input's shape[1] at runtime
+        :param img_shape: input's shape[2:4] at runtime
+        :param n_filters: the number of filters to apply to the image.
+        :param filter_shape: the size of the filters to apply
+        :type filter_shape: pair (rows, cols)
+        :param poolsize: the downsampling factor
+        :type poolsize: pair (rows, cols)
+        :param ignore_border: True means the downsampling should skip the scrap around the
+            edges if there is any.
+        :param conv_subsample: by how much should the convolution subsample the image?
+        :type  conv_subsample: pair (rows, cols)
+        :param dtype: the dtype for the internally allocated parameters.  This defaults to the
+            input's dtype.
+        :param conv_mode: The convolution mode ('full' or 'valid')
+        :param pool_type: Must be 'max' for now (reserved for different kinds of pooling)
+        :param squash_fn: The activation function for this layer
+        :type  squash_fn: A one-to-one elemwise function such as tanh or logistic sigmoid.
         """
         if pool_type != 'max':
             # LeNet5 actually used averaging filters. Consider implementing 'mean'