changeset 206:f2ddc795ec49

changes made with Pascal, but these should probably be discarded
author Yoshua Bengio <bengioy@iro.umontreal.ca>
date Fri, 16 May 2008 16:36:27 -0400
parents d1359de1ea13
children c5a7105fa40b
files mlp_factory_approach.py
diffstat 1 files changed, 33 insertions(+), 19 deletions(-)
--- a/mlp_factory_approach.py	Wed May 14 14:06:52 2008 -0400
+++ b/mlp_factory_approach.py	Fri May 16 16:36:27 2008 -0400
@@ -27,8 +27,9 @@
                 for i in xrange(100):
                     for input, target in trainset.minibatches(['input', 'target'],
                             minibatch_size=min(32, len(trainset))):
-                        dummy = update_fn(input, target[:,0], *params)
-                        if 0: print dummy[0] #the nll
+                        results = update_fn(input, target[:,0], *params)
+                        if 0: print results[0] # the nll
+                        # print params['b']
 
         def __call__(self, testset,
                 output_fieldnames=['output_class'],
@@ -39,7 +40,7 @@
             """Apply this model (as a function) to new data"""
             inputs = [self.nnet.v.input, self.nnet.v.target] + self.nnet.v.params
             fn = _function(inputs, [getattr(self.nnet.v, name) for name in output_fieldnames])
-            if 'target' in testset.fields():
+            if 'target' in testset.fieldNames():
                 return dataset.ApplyFunctionDataSet(testset, 
                     lambda input, target: fn(input, target[:,0], *self.params),
                     output_fieldnames)
@@ -49,9 +50,11 @@
                     output_fieldnames)
 
     def __init__(self, ninputs, nhid, nclass, lr, nepochs, 
-            l2coef=0.0,
-            linker='c&yp', 
-            hidden_layer=None):
+                 l2coef=0.0,
+                 linker='c&yp', 
+                 hidden_layer=None):
+        if not hidden_layer:
+            hidden_layer = AffineSigmoidLayer("hidden",ninputs,nhid,l2coef)
         class Vars:
             def __init__(self, lr, l2coef):
                 lr = t.constant(lr)
@@ -61,16 +64,11 @@
                 W2 = t.matrix('W2')
                 b2 = t.vector('b2')
 
-                if hidden_layer:
-                    hid, hid_params, hid_ivals, hid_regularization = hidden_layer(input)
-                else:
-                    W1 = t.matrix('W1')
-                    b1 = t.vector('b1')
-                    hid = t.tanh(b1 + t.dot(input, W1))
-                    hid_params = [W1, b1]
-                    hid_regularization = l2coef * t.sum(W1*W1)
-                    hid_ivals = lambda : [_randshape(ninputs, nhid), _randshape(nhid)]
-
+                hid = hidden_layer(input)
+                hid_params = hidden_layer.params()
+                hid_params_init_vals = hidden_layer.params_ivals  # keep the method itself; it is called later in NeuralNet.__call__
+                hid_regularization = hidden_layer.regularization()
+
                 params = [W2, b2] + hid_params
                 nll, predictions = nnet_ops.crossentropy_softmax_1hot( b2 + t.dot(hid, W2), target)
                 regularization = l2coef * t.sum(W2*W2) + hid_regularization
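
Note on the interface assumed by the hunk above: hidden_layer must be an object that is callable on the input variable and that exposes params(), params_ivals (a method producing fresh initial parameter values, invoked later in NeuralNet.__call__) and regularization(). A minimal sketch of such a layer, mirroring the inline tanh code that was removed and reusing this module's t (theano.tensor) and _randshape; the actual AffineSigmoidLayer is defined elsewhere in the repository, and the logistic activation below is only an assumption based on its name:

class AffineSigmoidLayer(object):
    """Sketch of the layer interface assumed by the hunk above."""
    def __init__(self, name, ninputs, noutputs, l2coef):
        self.W = t.matrix(name + '.W')
        self.b = t.vector(name + '.b')
        self.ninputs, self.noutputs = ninputs, noutputs
        self.l2coef = l2coef
    def __call__(self, input):
        # affine transform followed by a logistic sigmoid
        return 1.0 / (1.0 + t.exp(-(self.b + t.dot(input, self.W))))
    def params(self):
        return [self.W, self.b]
    def params_ivals(self):
        # fresh random initial values, as the removed hid_ivals lambda did
        return [_randshape(self.ninputs, self.noutputs),
                _randshape(self.noutputs)]
    def regularization(self):
        return self.l2coef * t.sum(self.W * self.W)
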
@@ -78,7 +76,7 @@
                 loss_01 = t.neq(output_class, target)
                 g_params = t.grad(nll + regularization, params)
                 new_params = [t.sub_inplace(p, lr * gp) for p,gp in zip(params, g_params)]
-                self.__dict__.update(locals()); del self.self
+                setattr_and_name(self, locals())
         self.nhid = nhid
         self.nclass = nclass
         self.nepochs = nepochs
@@ -87,14 +85,27 @@
 
     def __call__(self, trainset=None, iparams=None):
         if iparams is None:
-            iparams = [_randshape(self.nhid, self.nclass), _randshape(self.nclass)]\
-                    + self.v.hid_ivals()
+            iparams = LookupList(["W","b"],[_randshape(self.nhid, self.nclass), _randshape(self.nclass)]) \
+                    + self.v.hid_params_init_vals()
         rval = NeuralNet.Model(self, iparams)
         if trainset:
             rval.update(trainset)
         return rval
 
 
+def setattr_and_name(self, dct):
+    """Do a self.__setattr__ for every element in the dict dct
+    (except for the element self). In addition, make sure that
+    each element's .name (if it exists and is unset) is set to the
+    element's key in the dictionary.
+    Typical usage:  setattr_and_name(self, locals())  """
+    for varname, var in dct.items():
+        if var is not self:
+            if hasattr(var, "name") and not var.name:
+                var.name = varname
+            self.__setattr__(varname, var)
+
+
 if __name__ == '__main__':
     training_set1 = dataset.ArrayDataSet(numpy.array([[0, 0, 0],
                                                      [0, 1, 1],
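
setattr_and_name replaces the earlier self.__dict__.update(locals()); del self.self idiom: every local except self becomes an attribute, and any value carrying an unset .name attribute (e.g. a Theano variable) is named after its local variable. A small self-contained illustration, using a hypothetical stand-in class rather than a real Theano variable:

class Sym(object):
    name = None              # stand-in for an unnamed Theano variable

class Example(object):
    def __init__(self):
        w = Sym()
        lr = 0.01            # plain values have no .name; stored as-is
        setattr_and_name(self, locals())

e = Example()
assert e.w.name == 'w'       # the symbol was named after its local key
assert e.lr == 0.01
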
@@ -112,8 +123,11 @@
                                                      [1, 1, 1]]),
                                         {'input':slice(2)})
 
+
     learn_algo = NeuralNet(2, 10, 3, .1, 1000)
 
+    model = learn_algo()
+
     model1 = learn_algo(training_set1)
 
     model2 = learn_algo(training_set2)
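
The demo exercises the factory pattern: NeuralNet is the learning algorithm, calling it returns a Model (trained if a trainset is given, untrained otherwise), and a Model is itself callable on a dataset. A usage sketch built only from the signatures visible in this diff:

    # apply a trained model; the keyword matches Model.__call__ above
    predictions = model1(training_set2, output_fieldnames=['output_class'])

    # an untrained model can also be created first and trained afterwards
    model3 = learn_algo()
    model3.update(training_set1)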