diff deep/stacked_dae/stacked_dae.py @ 185:b9ea8e2d071a

Removed the code for reusing pretraining results (too complicated for too little benefit: the finetuning is what really takes long)
author fsavard
date Fri, 26 Feb 2010 17:45:52 -0500
parents 1f5937e9e530
children d364a130b221
--- a/deep/stacked_dae/stacked_dae.py	Fri Feb 26 15:25:44 2010 -0500
+++ b/deep/stacked_dae/stacked_dae.py	Fri Feb 26 17:45:52 2010 -0500
@@ -144,6 +144,9 @@
     def __init__(self, train_set_x, train_set_y, batch_size, n_ins, 
                  hidden_layers_sizes, n_outs, 
                  corruption_levels, rng, pretrain_lr, finetune_lr, input_divider=1.0):
+        # Just to make sure those are not modified somewhere else afterwards
+        hidden_layers_sizes = copy.deepcopy(hidden_layers_sizes)
+        corruption_levels = copy.deepcopy(corruption_levels)
         update_locals(self, locals())      
  
         self.layers             = []
@@ -239,48 +242,6 @@
 
         self.errors = self.logLayer.errors(self.y)
 
-    @classmethod
-    def copy_reusing_lower_layers(cls, obj, num_hidden_layers, new_finetuning_lr=None):
-        assert(num_hidden_layers <= obj.n_layers)
-
-        if not new_finetuning_lr:
-            new_finetuning_lr = obj.finetune_lr
-
-        new_sda = cls(train_set_x= obj.train_set_x, \
-                      train_set_y = obj.train_set_y,\
-                      batch_size = obj.batch_size, \
-                      n_ins= obj.n_ins, \
-                      hidden_layers_sizes = obj.hidden_layers_sizes[:num_hidden_layers], \
-                      n_outs = obj.n_outs, \
-                      corruption_levels = obj.corruption_levels[:num_hidden_layers],\
-                      rng = obj.rng,\
-                      pretrain_lr = obj.pretrain_lr, \
-                      finetune_lr = new_finetuning_lr, \
-                      input_divider = obj.input_divider )
-
-        # new_sda.layers contains only the hidden layers actually
-        for i, layer in enumerate(new_sda.layers):
-            original_layer = obj.layers[i]
-            for p1,p2 in zip(layer.params, original_layer.params):
-                p1.value = p2.value.copy()
-
-        return new_sda
-
-    def get_params_copy(self):
-        return copy.deepcopy(self.params)
-
-    def set_params_from_copy(self, copy):
-        # We don't want to replace the var, as the functions have pointers in there
-        # We only want to replace values.
-        for i, p in enumerate(self.params):
-            p.value = copy[i].value
-
-    def get_params_means(self):
-        s = []
-        for p in self.params:
-            s.append(numpy.mean(p.value))
-        return s
-
 if __name__ == '__main__':
     import sys
     args = sys.argv[1:]
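
For context on the two added lines: the constructor now deep-copies its list arguments so that later mutation of the caller's lists cannot silently change the stored configuration (the removed classmethod used to slice these very lists). A minimal standalone sketch of that defensive-copy pattern, with hypothetical names not taken from the repository:

    import copy

    class Config:
        def __init__(self, hidden_layers_sizes):
            # Defensive copy, mirroring the pattern added in this changeset:
            # the stored list is independent of the caller's list.
            self.hidden_layers_sizes = copy.deepcopy(hidden_layers_sizes)

    sizes = [500, 500]
    cfg = Config(sizes)
    sizes.append(1000)              # caller mutates its own list afterwards
    print(cfg.hidden_layers_sizes)  # [500, 500] -- unaffected by the mutation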
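The removed set_params_from_copy also relied on a general principle worth noting: compiled Theano functions hold references to the shared parameter variables, so restoring a snapshot must overwrite values in place rather than rebind the variables. A numpy-only sketch of that principle (hypothetical names, no Theano dependency), not the changeset's own code:

    import numpy

    params = [numpy.zeros(3), numpy.ones(2)]  # live parameters, held by reference elsewhere
    snapshot = [p.copy() for p in params]     # saved state (cf. the removed get_params_copy)

    params[0] += 5.0                          # training modifies the live values

    for p, saved in zip(params, snapshot):
        p[...] = saved                        # in-place write; existing references stay valid

    print(params[0])                          # back to zeros; anything holding the original
                                              # array sees the restored values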