changeset 1299:e78ced0d6540

merge
author Dumitru Erhan <dumitru.erhan@gmail.com>
date Fri, 01 Oct 2010 12:29:04 -0400
parents cba5a348a732 (current diff) 24890ca1d96b (diff)
children cc1c5720eeca a8f909502886
files doc/v2_planning/coding_style.txt
diffstat 3 files changed, 37 insertions(+), 20 deletions(-) [+]
line diff
--- a/doc/v2_planning/coding_style.txt	Fri Oct 01 12:27:48 2010 -0400
+++ b/doc/v2_planning/coding_style.txt	Fri Oct 01 12:29:04 2010 -0400
@@ -114,6 +114,10 @@
             from foo import Bar, Blah
          when imported stuff is re-used multiple times in the same file, and
          there is no ambiguity.
+     DWF: One exception I'd like to propose to the "import A as B" moratorium
+          is that we adopt the "import numpy as np" standard that's used in
+          NumPy and SciPy itself. For NumPy heavy code this really cuts down
+          on clutter, without significant impact on readability (IMHO).
 
    * Imports should usually be on separate lines.
      OD: I would add an exception, saying it is ok to group multiple imports
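For illustration, the "import numpy as np" convention proposed above would look like this in NumPy-heavy code (the module and function names below are hypothetical):

    import numpy as np

    def standardize(x):
        # subtract the mean and divide by the standard deviation of a NumPy array
        return (x - np.mean(x)) / np.std(x)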
--- a/pylearn/formulas/costs.py	Fri Oct 01 12:27:48 2010 -0400
+++ b/pylearn/formulas/costs.py	Fri Oct 01 12:29:04 2010 -0400
@@ -1,5 +1,5 @@
 """
-This script defines a few often used cost functions.
+Common training criteria.
 """
 import theano
 import theano.tensor as T
@@ -10,13 +10,15 @@
     """ Compute the crossentropy of binary output wrt binary target.
 
     .. math::
-                L_{CE} \equiv t\log(o) + (1-t)\log(1-o) 
+                L_{CE} \equiv -\left( t\log(o) + (1-t)\log(1-o) \right)
 
     :type output: Theano variable
     :param output: Binary output or prediction :math:`\in[0,1]`
     :type target: Theano variable
     :param target: Binary target usually :math:`\in\{0,1\}`
     """
-    return -(target * tensor.log(output) + (1.0 - target) * tensor.log(1.0 - output))
+    return -(target * T.log(output) + (1.0 - target) * T.log(1.0 - output))
 
 
+# This file seems like it has some overlap with theano.tensor.nnet.  Which functions should go
+# in which file?
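
A minimal sketch of how the cross-entropy formula above might be exercised symbolically (assuming a working Theano install; the variable names are illustrative):

    import theano
    import theano.tensor as T

    output = T.vector('output')   # predictions in (0, 1)
    target = T.vector('target')   # binary targets, usually in {0, 1}

    # same expression as binary_crossentropy() in pylearn/formulas/costs.py
    loss = -(target * T.log(output) + (1.0 - target) * T.log(1.0 - output))

    f = theano.function([output, target], loss)
    print f([0.9, 0.1], [1.0, 0.0])   # small losses for confident, correct predictions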
--- a/pylearn/formulas/noise.py	Fri Oct 01 12:27:48 2010 -0400
+++ b/pylearn/formulas/noise.py	Fri Oct 01 12:29:04 2010 -0400
@@ -1,12 +1,14 @@
 """
+Noise functions used to train Denoising Auto-Associators.
 
-This script define the different symbolic noise functions.
+Functions in this module often include a `noise_lvl` argument that controls the amount of noise
+that the function applies.
 The noise contract is simple: noise_lvl is a symbolic variable going from 0 to 1.
-0: no changement.
-1: max noise.
+0: no change.
+1: maximum noise.
 """
 import theano
-from tags import tags
+import tags
 s="""
 * A LaTeX mathematical description of the formulas (for picture representation in generated documentation)
 * Tags(for searching):
@@ -19,35 +21,44 @@
 * Tell the domain and range of the input/output (the range should state whether endpoints are included or excluded)
 """
 
-@tags('noise','binomial','salt')
-def binomial_noise(theano_rng,inp,noise_lvl):
-    """ This add binomial noise to inp. Only the salt part of pepper and salt.
+@tags.tags('noise','binomial','salt')
+def binomial_noise(theano_rng,input,noise_lvl):
+    """
+    Return `input` with randomly chosen elements set to zero.
+
+    TODO: MATH DEFINITION
 
-    :type inp: Theano Variable
-    :param inp: The input that we want to add noise
+    :type input: Theano tensor variable
+    :param input: input
     :type noise_lvl: float
-    :param noise_lvl: The % of noise. Between 0(no noise) and 1.
+    :param noise_lvl: The probability of setting each element to zero.
     """
-    return theano_rng.binomial( size = inp.shape, n = 1, p =  1 - noise_lvl, dtype=theano.config.floatX) * inp
+    mask = theano_rng.binomial(
+            size=input.shape,
+            n=1,
+            p=1 - noise_lvl,
+            dtype=theano.config.floatX)
+    # QUESTION: should the dtype not default to the input dtype?
+    return mask * input
 
 
-@tags('noise','binomial NLP','pepper','salt')
+@tags.tags('noise','binomial NLP','pepper','salt')
 def pepper_and_salt_noise(theano_rng,inp,noise_lvl):
     """ This add pepper and salt noise to inp
-    
-    :type inp: Theano Variable
+
+    :type inp: Theano variable
     :param inp: The input that we want to add noise
     :type noise_lvl: tuple(float,float)
-    :param noise_lvl: The % of noise for the salt and pepper. Between 0(no noise) and 1.
+    :param noise_lvl: The %% of noise for the salt and pepper. Between 0 (no noise) and 1.
     """
     return theano_rng.binomial( size = inp.shape, n = 1, p =  1 - noise_lvl[0], dtype=theano.config.floatX) * inp \
                         + (inp==0) * theano_rng.binomial( size = inp.shape, n = 1, p =  noise_lvl[1], dtype=theano.config.floatX)
 
-@tags('noise','gauss','gaussian')
+@tags.tags('noise','gauss','gaussian')
 def gaussian_noise(theano_rng,inp,noise_lvl):
     """ This add gaussian NLP noise to inp
 
-    :type inp: Theano Variable
+    :type inp: Theano variable
     :param inp: The input that we want to add noise
     :type noise_lvl: float
     :param noise_lvl: The standard deviation of the gaussian.
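
A minimal usage sketch for the noise functions above (assuming the `input` rename in this diff; the seed and corruption level are illustrative):

    import theano
    import theano.tensor as T
    from theano.tensor.shared_randomstreams import RandomStreams

    from pylearn.formulas.noise import binomial_noise

    theano_rng = RandomStreams(seed=1234)
    x = T.matrix('x')

    # zero out roughly 30% of the entries of x (salt-only corruption)
    corrupted = binomial_noise(theano_rng, x, 0.3)
    corrupt_fn = theano.function([x], corrupted)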