changeset 487:94a4c5b7293b

Make the DAA code more generic: the activation function and the reconstruction cost are now configurable.
author Joseph Turian <turian@gmail.com>
date Tue, 28 Oct 2008 02:21:50 -0400
parents 5ccb1662f9f6
children e06666ac32d5
files algorithms/daa.py cost.py
diffstat 2 files changed, 19 insertions(+), 6 deletions(-)
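The changeset makes the activation function and the reconstruction cost pluggable through two new constructor arguments. A minimal usage sketch, not part of the changeset: the import path, the choice of T.tanh, and the use of cost.quadratic are illustrative assumptions, and compiling the module (e.g. via make()) is omitted.

    # Sketch only: swap in tanh activation and a quadratic reconstruction cost
    # via the new keyword arguments added in this changeset.
    from theano import tensor as T
    from pylearn import cost
    from pylearn.algorithms.daa import SigmoidXEDenoisingAA   # assumed import path

    model = SigmoidXEDenoisingAA(
        regularize=True,
        tie_weights=True,
        activation_function=T.tanh,                    # elementwise symbolic op
        reconstruction_cost_function=cost.quadratic,   # must return one cost per example (row)
    )
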
--- a/algorithms/daa.py	Tue Oct 28 02:02:17 2008 -0400
+++ b/algorithms/daa.py	Tue Oct 28 02:21:50 2008 -0400
@@ -4,14 +4,22 @@
 from theano.tensor import nnet as NN
 import numpy as N
 
+from pylearn import cost as cost
+
 class DenoisingAA(T.RModule):
 
-    def __init__(self, input = None, regularize = True, tie_weights = True):
+    def __init__(self, input = None, regularize = True, tie_weights = True,
+            activation_function=NN.sigmoid, reconstruction_cost_function=cost.cross_entropy):
+        """
+        @param reconstruction_cost_function: Should return one cost per example (row)
+        """
         super(DenoisingAA, self).__init__()
 
         # MODEL CONFIGURATION
         self.regularize = regularize
         self.tie_weights = tie_weights
+        self.activation_function = activation_function
+        self.reconstruction_cost_function = reconstruction_cost_function
 
         # ACQUIRE/MAKE INPUT
         if not input:
@@ -114,24 +122,29 @@
         obj.__hide__ = ['params']
 
     def build_regularization(self):
+        """
+        @todo: Why do we need this function?
+        """
         return T.zero() # no regularization!
 
 
 class SigmoidXEDenoisingAA(DenoisingAA):
+    """
+    @todo: Merge this into the above.
+    """
 
     def build_corrupted_input(self):
         self.noise_level = theano.Member(T.scalar())
         return self.random.binomial(T.shape(self.input), 1, 1 - self.noise_level) * self.input
 
     def hid_activation_function(self, activation):
-        return NN.sigmoid(activation)
+        return self.activation_function(activation)
 
     def out_activation_function(self, activation):
-        return NN.sigmoid(activation)
+        return self.activation_function(activation)
 
     def build_reconstruction_costs(self, output):
-        reconstruction_cost_matrix = -(self.input * T.log(output) + (1 - self.input) * T.log(1 - output))
-        return T.sum(reconstruction_cost_matrix, axis=1)
+        return self.reconstruction_cost_function(self.input, output)
 
     def build_regularization(self):
         self.l2_coef = theano.Member(T.scalar())
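
Above, the inline cross-entropy is replaced by a call to self.reconstruction_cost_function(self.input, output). A sketch of the contract such a function must satisfy, mirroring the removed inline code (the function name is illustrative):

    # Given (target, output) symbolic matrices, return one cost per example (row).
    from theano import tensor as T

    def per_example_cross_entropy(target, output, axis=1):
        ce = -(target * T.log(output) + (1 - target) * T.log(1 - output))
        return T.sum(ce, axis=axis)   # reduce over features -> vector of per-row costs
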
--- a/cost.py	Tue Oct 28 02:02:17 2008 -0400
+++ b/cost.py	Tue Oct 28 02:21:50 2008 -0400
@@ -11,7 +11,7 @@
 from xlogx import xlogx
 
 def quadratic(target, output, axis=1):
-    return T.mean(T.sqr(target - output), axis)
+    return T.mean(T.sqr(target - output), axis=axis)
 
 def cross_entropy(target, output, axis=1):
     """