changeset 497:a272f4cbf004

Rename 'x' => 'input' and 'y' => 'output'
author Joseph Turian <turian@gmail.com>
date Tue, 28 Oct 2008 12:25:04 -0400
parents f13847478c6d
children 2be795cc5c3a
files algorithms/logistic_regression.py
diffstat 1 files changed, 16 insertions(+), 16 deletions(-)
--- a/algorithms/logistic_regression.py	Tue Oct 28 12:09:49 2008 -0400
+++ b/algorithms/logistic_regression.py	Tue Oct 28 12:25:04 2008 -0400
@@ -18,10 +18,10 @@
             self.lr = 0.01
             self.__hide__ = ['params']
 
-    def __init__(self, x=None, targ=None, w=None, b=None, lr=None, regularize=False):
+    def __init__(self, input=None, targ=None, w=None, b=None, lr=None, regularize=False):
         super(Module_Nclass, self).__init__() #boilerplate
 
-        self.x = x if x is not None else T.matrix()
+        self.input = input if input is not None else T.matrix('input')
         self.targ = targ if targ is not None else T.lvector()
 
         self.w = w if w is not None else module.Member(T.dmatrix())
@@ -30,22 +30,22 @@
 
         self.params = [p for p in [self.w, self.b] if p.owner is None]
 
-        xent, y = nnet.crossentropy_softmax_1hot(
-                T.dot(self.x, self.w) + self.b, self.targ)
+        xent, output = nnet.crossentropy_softmax_1hot(
+                T.dot(self.input, self.w) + self.b, self.targ)
         sum_xent = T.sum(xent)
 
-        self.y = y
+        self.output = output
         self.sum_xent = sum_xent
         self.cost = sum_xent
 
         #define the apply method
-        self.pred = T.argmax(T.dot(self.x, self.w) + self.b, axis=1)
-        self.apply = module.Method([self.x], self.pred)
+        self.pred = T.argmax(T.dot(self.input, self.w) + self.b, axis=1)
+        self.apply = module.Method([self.input], self.pred)
 
         if self.params:
             gparams = T.grad(sum_xent, self.params)
 
-            self.update = module.Method([self.x, self.targ], sum_xent,
+            self.update = module.Method([self.input, self.targ], sum_xent,
                     updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))
 
 class Module(module.FancyModule):
@@ -58,10 +58,10 @@
             self.lr = 0.01
             self.__hide__ = ['params']
 
-    def __init__(self, x=None, targ=None, w=None, b=None, lr=None, regularize=False):
+    def __init__(self, input=None, targ=None, w=None, b=None, lr=None, regularize=False):
         super(Module, self).__init__() #boilerplate
 
-        self.x = x if x is not None else T.matrix()
+        self.input = input if input is not None else T.matrix('input')
         self.targ = targ if targ is not None else T.lcol()
 
         self.w = w if w is not None else module.Member(T.dmatrix())
@@ -70,23 +70,23 @@
 
         self.params = [p for p in [self.w, self.b] if p.owner is None]
 
-        y = nnet.sigmoid(T.dot(self.x, self.w))
-        xent = -self.targ * T.log(y) - (1.0 - self.targ) * T.log(1.0 - y)
+        output = nnet.sigmoid(T.dot(self.input, self.w))
+        xent = -self.targ * T.log(output) - (1.0 - self.targ) * T.log(1.0 - output)
         sum_xent = T.sum(xent)
 
-        self.y = y
+        self.output = output
         self.xent = xent
         self.sum_xent = sum_xent
         self.cost = sum_xent
 
         #define the apply method
-        self.pred = (T.dot(self.x, self.w) + self.b) > 0.0
-        self.apply = module.Method([self.x], self.pred)
+        self.pred = (T.dot(self.input, self.w) + self.b) > 0.0
+        self.apply = module.Method([self.input], self.pred)
 
         #if this module has any internal parameters, define an update function for them
         if self.params:
             gparams = T.grad(sum_xent, self.params)
-            self.update = module.Method([self.x, self.targ], sum_xent,
+            self.update = module.Method([self.input, self.targ], sum_xent,
                                         updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))
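For reference, a minimal usage sketch of the renamed interface follows. It is not part of the changeset: it assumes the old theano.compile.module API, in which calling make() on a module compiles its Methods into callable functions, and it assumes that make() here needs no extra arguments and that w and b can be assigned directly on the made instance. The shapes and synthetic data are likewise illustrative.

# Hypothetical sketch exercising the renamed 'input'/'output' interface.
# Assumes the old theano.compile.module API, where make() returns an
# instance whose module.Method attributes are compiled functions.
import numpy

from algorithms.logistic_regression import Module_Nclass

model = Module_Nclass().make()   # compile apply/update (assumption:
                                 # make() takes no extra arguments here)
model.w = numpy.zeros((5, 3))    # assumed shapes: 5 features -> 3 classes
model.b = numpy.zeros(3)

x = numpy.random.randn(4, 5)                             # minibatch: 4 examples, 5 features
t = numpy.random.randint(0, 3, size=4).astype('int64')   # targets, matching T.lvector()

cost = model.update(x, t)   # one step of p <- p - lr * grad(sum_xent, p)
pred = model.apply(x)       # argmax(dot(input, w) + b, axis=1), per the diff above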