changeset 1382:00116be92710

First try to use numpy documentation syntax.
author Frederic Bastien <nouiz@nouiz.org>
date Wed, 08 Dec 2010 14:30:13 -0500
parents 0673e6af650a
children 0de66ab23dcc
files doc/conf.py pylearn/formulas/activations.py
diffstat 2 files changed, 117 insertions(+), 55 deletions(-)
--- a/doc/conf.py	Mon Dec 06 13:33:07 2010 -0500
+++ b/doc/conf.py	Wed Dec 08 14:30:13 2010 -0500
@@ -33,6 +33,11 @@
 except ImportError:
     pass
 
+try:
+    import numpydoc
+    extensions.append('numpydoc')
+except ImportError:
+    pass
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['.templates']
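
For reference, the numpydoc extension enabled above parses the "Parameters" / "Returns" section layout that the rest of this changeset adopts. A minimal sketch of that layout, on a hypothetical function that is not part of the patch::

    def scale(x, factor=2.0):
        """Multiply `x` by `factor`.

        Parameters
        ----------
        x : tensor-like
            The value to scale.
        factor : float
            Constant multiplier.

        Returns
        -------
        ret : same type as `x`
            The input scaled by `factor`.
        """
        return x * factor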
--- a/pylearn/formulas/activations.py	Mon Dec 06 13:33:07 2010 -0500
+++ b/pylearn/formulas/activations.py	Wed Dec 08 14:30:13 2010 -0500
@@ -24,6 +24,7 @@
     function of the input x.
 
     .. math::
+
         \\textrm{sigmoid}(x) = \\frac{1}{1 + e^{-x}}
 
     The image of :math:`\\textrm{sigmoid}(x)` is the open interval (0,
@@ -31,13 +32,18 @@
     point representations, :math:`\\textrm{sigmoid}(x)` will lie in the
     closed range [0, 1].
 
-    :param x: tensor-like (a Theano variable with type theano.Tensor,
-              or a value that can be converted to one) :math:`\in
-              \mathbb{R}^n`
+    Parameters
+    ----------
+    x : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
 
-    :return: a Theano variable with the same shape as the input, where
-             the sigmoid function is mapped to each element of the
-             input x.
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        where the sigmoid function is mapped to each element of the 
+        input `x`.
+
     """
     return theano.tensor.nnet.sigmoid(x)
 
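
A short usage sketch (not part of the changeset; it only exercises the theano call shown in the return statement above, plus standard theano/numpy helpers) confirming that the outputs stay inside (0, 1)::

    import numpy
    import theano
    import theano.tensor as T

    x = T.dvector('x')
    f = theano.function([x], T.nnet.sigmoid(x))
    out = f(numpy.array([-5.0, 0.0, 5.0]))
    print(out)                                   # ~ [0.0067, 0.5, 0.9933]
    print((out > 0).all() and (out < 1).all())   # True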
@@ -52,6 +58,7 @@
     tangent) of the input x.
 
     .. math::
+
         \\textrm{tanh}(x) = \\frac{e^{2x} - 1}{e^{2x} + 1}
 
     The image of :math:`\\textrm{tanh}(x)` is the open interval (-1,
@@ -59,13 +66,16 @@
     point representations, :math:`\\textrm{tanh}(x)` will lie in the
     closed range [-1, 1].
 
-    :param x: tensor-like (a Theano variable with type theano.Tensor,
-              or a value that can be converted to one) :math:`\in
-              \mathbb{R}^n`
+    Parameters
+    ----------
+    x : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
 
-    :return: a Theano variable with the same shape as the input, where
-             the tanh function is mapped to each element of the input
-             x.
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        where the tanh function is mapped to each element of the input `x`.
     """
     return theano.tensor.tanh(x)
 
@@ -81,6 +91,7 @@
     TODO: where does 1.759 come from? why is it normalized like that?
 
     .. math::
+
         \\textrm{tanh\_normalized}(x) = 1.759\\textrm{ tanh}\left(\\frac{2x}{3}\\right)
 
     The image of :math:`\\textrm{tanh\_normalized}(x)` is the open
@@ -90,13 +101,17 @@
     closed range [-1.759, 1.759]. The exact bound depends on the
     precision of the floating point representation.
 
-    :param x: tensor-like (a Theano variable with type theano.Tensor,
-              or a value that can be converted to one) :math:`\in
-              \mathbb{R}^n`
+    Parameters
+    ----------
+    x : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
 
-    :return: a Theano variable with the same shape as the input, where
-             the tanh\_normalized function is mapped to each element of
-             the input x.
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        where the tanh_normalized function is mapped to each element of 
+        the input `x`.
     """
     return 1.759*theano.tensor.tanh(0.6666*x)
 
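
The 1.759 bound mentioned in the docstring can be checked numerically against the exact expression returned above (a sketch, not part of the changeset)::

    import numpy
    import theano
    import theano.tensor as T

    x = T.dvector('x')
    f = theano.function([x], 1.759 * T.tanh(0.6666 * x))
    out = f(numpy.linspace(-50.0, 50.0, 101))
    print(abs(out).max())   # saturates at 1.759, never beyond it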
@@ -111,6 +126,7 @@
     hyperbolic tangent of x.
 
     .. math::
+
         \\textrm{abs\_tanh}(x) = |\\textrm{tanh}(x)|
 
     The image of :math:`\\textrm{abs\_tanh}(x)` is the interval [0, 1),
@@ -118,13 +134,17 @@
     point representations, :math:`\\textrm{abs\_tanh}(x)` will lie in
     the range [0, 1].
 
-    :param x: tensor-like (a Theano variable with type theano.Tensor,
-              or a value that can be converted to one) :math:`\in
-              \mathbb{R}^n`
+    Parameters
+    ----------
+    x : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
 
-    :return: a Theano variable with the same shape as the input, where
-             the abs_tanh function is mapped to each element of the
-             input x.
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        where the abs_tanh function is mapped to each element of 
+        the input `x`.
     """
     return theano.tensor.abs_(theano.tensor.tanh(x))
 
@@ -140,6 +160,7 @@
     TODO: where does 1.759 come from? why is it normalized like that?
 
     .. math::
+
         \\textrm{abs\_tanh\_normalized}(x) = \left|1.759\\textrm{ tanh}\left(\\frac{2x}{3}\\right)\\right|
 
     The image of :math:`\\textrm{abs\_tanh\_normalized}(x)` is the range
@@ -149,13 +170,17 @@
     approximative closed range [0, 1.759]. The exact upper bound
     depends on the precision of the floating point representation.
 
-    :param x: tensor-like (a Theano variable with type theano.Tensor,
-              or a value that can be converted to one) :math:`\in
-              \mathbb{R}^n`
+    Parameters
+    ----------
+    x : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
 
-    :return: a Theano variable with the same shape as the input, where
-             the abs_tanh_normalized function is mapped to each
-             element of the input x.
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        where the abs_tanh_normalized function is mapped to each
+        element of the input `x`.
     """
     return theano.tensor.abs_(1.759*theano.tensor.tanh(0.6666*x))
 
@@ -167,13 +192,20 @@
     Returns a symbolic variable that computes the softsign of ``input``.
     
     .. math::
+
                 f(input) = \\frac{input}{1.0 + |input|}
 
-    :type input:  tensor-like
-    :param input: input tensor to which softsign should be applied
-    :rtype:       Theano variable
-    :return:      tensor obtained after applying the softsign function
+    Parameters
+    ----------
+    input : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
 
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        where the softsign function is mapped to each
+        element of `input`.
     """
     return input/(1.0 + tensor.abs_(input))
 
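
The softsign expression returned above can be exercised directly (a sketch, not part of the changeset) to see that its values stay inside (-1, 1)::

    import numpy
    import theano
    import theano.tensor as T

    inp = T.dvector('inp')
    f = theano.function([inp], inp / (1.0 + T.abs_(inp)))
    print(f(numpy.array([-10.0, -1.0, 0.0, 1.0, 10.0])))
    # -> [-0.909..., -0.5, 0.0, 0.5, 0.909...]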
@@ -186,11 +218,17 @@
     .. math::
                 f(input) = \left| \\frac{input}{1.0 +|input|} \\right|
 
-    :type input:  tensor-like
-    :param input: input tensor to which softsign should be applied
-    :rtype:       Tensor variable
-    :return:      tensor obtained by taking the absolute value of softsign 
-                  of the input
+    Parameters
+    ----------
+    input : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
+
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        where the absolute value of the softsign function is mapped to each
+        element of `input`.
     """
     return tensor.abs_(input)/(1.0 + tensor.abs_(input))
 
@@ -202,19 +240,24 @@
     and only if it is positive, 0 otherwise.
 
     .. math::
+
                 f(input) = \left \lbrace \\begin{array}{l}
                             input \quad \\text{ if } input > 0 \\
                             0     \quad \\text{ else }
                          \end{array}
                          \\right \}
 
-    :type input:  tensor-like
-    :param input: input tensor to which the rectifier activation function 
-                  will be applied
-    :rtype:       Tensor variable
-    :return:      always positive tensor which equals with the input if it is also 
-                  positive or to 0 otherwise
+    Parameters
+    ----------
+    input : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
 
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        An always-positive tensor whose elements equal the input where the
+        input is positive, and 0 elsewhere.
     """
     return input*(input>=0)
 
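
The rectifier is implemented with an elementwise comparison rather than a max, as the return statement above shows. A small sketch of the behaviour (not part of the changeset)::

    import numpy
    import theano
    import theano.tensor as T

    inp = T.dvector('inp')
    f = theano.function([inp], inp * (inp >= 0))
    print(f(numpy.array([-2.0, -0.5, 0.0, 0.5, 2.0])))
    # -> [ 0.   0.   0.   0.5  2. ]   negative entries are zeroed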
@@ -226,12 +269,20 @@
            at initialization.
 
     .. math::
+
                 f(input) = ln \left( 1 + e^{input} \\right)
 
-    :type input:  tensor-like
-    :param input: input tensor to which the softplus should be applied
-    :rtype:       Theano variable
-    :return:      tensor obtained by applying softsign on the input
+    Parameters
+    ----------
+    input : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
+
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        where the softplus function is mapped to each
+        element of `input`.
     """
     return tensor.nnet.softplus(input)
 
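
A quick numeric check (not part of the changeset) that theano's softplus matches the ln(1 + e^input) formula documented above::

    import numpy
    import theano
    import theano.tensor as T

    inp = T.dvector('inp')
    f = theano.function([inp], T.nnet.softplus(inp))
    v = numpy.array([-2.0, 0.0, 2.0])
    print(numpy.allclose(f(v), numpy.log1p(numpy.exp(v))))   # True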
@@ -242,15 +293,21 @@
     ``input``.
 
     .. math::
+
                 f(input) = |input|
 
-    :type input:  tensor-like
-    :param input: input tensor
-    :rtype:       Theano variable
-    :return:      tensor that represents the absolute value of the input
+    Parameters
+    ----------
+    input : tensor-like
+        A Theano variable with type theano.Tensor, or a value that can be 
+        converted to one :math:`\in \mathbb{R}^n`
 
-
-    """
+    Returns
+    -------
+    ret : a Theano variable with the same shape as the input
+        where the absolute value function is mapped to each
+        element of `input`.
+    """
     return theano.tensor.abs_(input)