changeset 503:c7ce66b4e8f4

Extensions to algorithms, and some cleanup (by defining a linear_output result).
author Joseph Turian <turian@gmail.com>
date Wed, 29 Oct 2008 03:29:18 -0400
parents 17945defd813
children 19ab9ce916e3
files algorithms/_test_logistic_regression.py algorithms/daa.py algorithms/layer.py algorithms/logistic_regression.py algorithms/stacker.py
diffstat 5 files changed, 28 insertions(+), 7 deletions(-)
--- a/algorithms/_test_logistic_regression.py	Wed Oct 29 02:08:56 2008 -0400
+++ b/algorithms/_test_logistic_regression.py	Wed Oct 29 03:29:18 2008 -0400
@@ -2,8 +2,8 @@
 import sys, time
 
 if __name__ == '__main__':
-    pprint.assign(nnet_ops.crossentropy_softmax_1hot_with_bias_dx, printing.FunctionPrinter('xsoftmaxdx'))
-    pprint.assign(nnet_ops.crossentropy_softmax_argmax_1hot_with_bias, printing.FunctionPrinter('nll', 'softmax', 'argmax'))
+    pprint.assign(nnet.crossentropy_softmax_1hot_with_bias_dx, printing.FunctionPrinter('xsoftmaxdx'))
+    pprint.assign(nnet.crossentropy_softmax_argmax_1hot_with_bias, printing.FunctionPrinter('nll', 'softmax', 'argmax'))
     if 1:
         lrc = Module_Nclass()
 
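The rename from nnet_ops to nnet above presumably tracks theano's module reorganization. The test file's import block is not shown in this hunk; a plausible matching header (an assumption, not part of the changeset) would be:

    from theano import printing
    from theano.printing import pprint
    from theano.tensor import nnet    # formerly: from theano.tensor import nnet_ops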
--- a/algorithms/daa.py	Wed Oct 29 02:08:56 2008 -0400
+++ b/algorithms/daa.py	Wed Oct 29 03:29:18 2008 -0400
@@ -101,6 +101,8 @@
         self.representation = theano.Method(self.input, self.hidden)
         self.reconstruction_through_noise = theano.Method(self.input, [self.corrupted_input, self.noutput])
 
+        self.validate = theano.Method(self.input, [self.cost, self.output])
+
     def _instance_initialize(self, obj, input_size = None, hidden_size = None, seed = None, **init):
         if (input_size is None) ^ (hidden_size is None):
             raise ValueError("Must specify input_size and hidden_size or neither.")
--- a/algorithms/layer.py	Wed Oct 29 02:08:56 2008 -0400
+++ b/algorithms/layer.py	Wed Oct 29 03:29:18 2008 -0400
@@ -3,6 +3,8 @@
     input, cost, lr, and update
 (a Method called update, to be more precise, whose first argument is the input)
 
+input_dimension, output_dimension (aliased as nin and nout)
+
 Modules like pylearn.algorithms.logistic_regression.Module_Nclass and
 pylearn.algorithms.???.Bin_Regressor should inherit from Layer and
 Stacker should assume Layer.
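Taken together, the docstring now spells out a duck-typed contract. A quick conformance check (illustrative only; the attribute list comes straight from the docstring above):

    def conforms_to_layer(layer):
        """Return True if `layer` exposes the attributes the Layer docstring requires."""
        required = ['input', 'cost', 'lr', 'update',
                    'input_dimension', 'output_dimension', 'nin', 'nout']
        return all(hasattr(layer, name) for name in required)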
--- a/algorithms/logistic_regression.py	Wed Oct 29 02:08:56 2008 -0400
+++ b/algorithms/logistic_regression.py	Wed Oct 29 03:29:18 2008 -0400
@@ -18,6 +18,8 @@
         self.b = N.zeros(n_out)
         self.lr = 0.01
         self.__hide__ = ['params']
+        self.input_dimension = n_in
+        self.output_dimension = n_out
 
 class Module_Nclass(module.FancyModule):
     InstanceType = LogRegInstanceType
@@ -34,22 +36,35 @@
 
         self.params = [p for p in [self.w, self.b] if p.owner is None]
 
-        xent, output = nnet.crossentropy_softmax_1hot(
-                T.dot(self.x, self.w) + self.b, self.targ)
+        linear_output = T.dot(self.x, self.w) + self.b
+
+        (xent, softmax, max_pr, argmax) = nnet.crossentropy_softmax_max_and_argmax_1hot(
+                linear_output, self.targ)
         sum_xent = T.sum(xent)
 
-        self.output = output
+        self.softmax = softmax
+        self.argmax = argmax
+        self.max_pr = max_pr
         self.sum_xent = sum_xent
 
+        # Softmax computed directly from linear_output (no targets needed).
+        softmax_unsupervised = nnet.softmax(linear_output)
+        self.softmax_unsupervised = softmax_unsupervised
+
         #compatibility with the current implementation of stacker/daa
         #TODO: remove this, make a wrapper
-        self.cost = sum_xent
+        self.cost = self.sum_xent
         self.input = self.x
+        # TODO: I want to make output = linear_output.
+        self.output = self.softmax_unsupervised
 
         #define the apply method
-        self.pred = T.argmax(T.dot(self.input, self.w) + self.b, axis=1)
+        self.pred = T.argmax(linear_output, axis=1)
         self.apply = module.Method([self.input], self.pred)
 
+        self.validate = module.Method([self.input, self.targ], [self.cost, self.argmax, self.max_pr])
+        self.softmax_output = module.Method([self.input], self.softmax_unsupervised)
+
         if self.params:
             gparams = T.grad(sum_xent, self.params)
 
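With validate and softmax_output exposed as Methods, an instance can report the cross-entropy, the predicted class, and its probability on labelled data, or just class probabilities on unlabelled data. A usage sketch (the make() arguments and data shapes are assumptions; the Method names come from this changeset):

    import numpy

    lrc = Module_Nclass()              # as instantiated in the test file above
    lr = lrc.make(10, 2)               # hypothetical: 10 input features, 2 classes

    x = numpy.random.rand(5, 10)
    y = numpy.random.randint(0, 2, size=5)

    cost, argmax, max_pr = lr.validate(x, y)   # new Method from this changeset
    probs = lr.softmax_output(x)               # softmax over linear_output, no targets
    preds = lr.apply(x)                        # argmax over linear_output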
--- a/algorithms/stacker.py	Wed Oct 29 02:08:56 2008 -0400
+++ b/algorithms/stacker.py	Wed Oct 29 03:29:18 2008 -0400
@@ -72,6 +72,8 @@
             if layer.lr is None:
                 layer.lr = lr
         if nunits:
+            obj.input_dimension = nunits[0]
+            obj.output_dimension = nunits[-1]
             if len(nunits) != len(obj.layers) + 1:
                 raise ValueError('You should give exactly one more unit number than there are layers.')
             for ni, no, layer in zip(nunits[:-1], nunits[1:], obj.layers):
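The stacker change mirrors the Layer docstring: the instance's end-to-end dimensions are read off nunits. Schematically (the construction call is an assumption; the attribute values follow directly from the hunk above):

    nunits = [784, 500, 10]
    # obj = stacker_module.make(..., nunits=nunits)   # hypothetical construction call
    # obj.input_dimension  == nunits[0]               # 784
    # obj.output_dimension == nunits[-1]              # 10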