diff deep/stacked_dae/v_sylvain/nist_sda.py @ 386:8875853b5bfc

Added an improved finetune option for PNIST07
author SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date Tue, 27 Apr 2010 08:41:44 -0400
parents 87e684bfe538
children 7c201ca1484f
--- a/deep/stacked_dae/v_sylvain/nist_sda.py	Tue Apr 27 08:20:59 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/nist_sda.py	Tue Apr 27 08:41:44 2010 -0400
@@ -103,6 +103,16 @@
              "or reduce the number of pretraining epoch to run the code (better idea).\n")
         print('\n\tpretraining with P07')
         optimizer.pretrain(datasets.nist_P07(min_file=0,max_file=nb_file),decrease = dec) 
+    elif state['pretrain_choice'] == 2:
+        nb_file = int(state['pretraining_epochs_per_layer'])
+        state['pretraining_epochs_per_layer'] = 1 # Only one pass over the dataset
+        if nb_file >= 100:
+            sys.exit("The code does not support this many pretraining epochs (99 max with PNIST07).\n"+
+            "You have to correct the code (and be patient, PNIST07 is huge!!)\n"+
+            "or reduce the number of pretraining epochs to run the code (better idea).\n")
+        print('\n\tpretraining with PNIST07')
+        optimizer.pretrain(datasets.PNIST07(min_file=0,max_file=nb_file),decrease = dec)
+        
     channel.save()
     
     #Set some of the parameters used for the finetuning
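
In this new option, `pretraining_epochs_per_layer` is reinterpreted as the number of PNIST07 files to stream, and the real epoch count is pinned to 1 so the optimizer makes a single pass over that data. A minimal standalone sketch of the dispatch, assuming the `datasets.PNIST07` constructor and the `optimizer.pretrain` signature visible in the diff (the names mirror the diff; the helper itself is hypothetical):

    import sys

    def pretrain_pnist07(state, datasets, optimizer, dec):
        # The epoch setting doubles as a file count for the huge PNIST07 set.
        nb_file = int(state['pretraining_epochs_per_layer'])
        state['pretraining_epochs_per_layer'] = 1  # a single pass over each file
        if nb_file >= 100:
            # Same guard as in the diff: at most 99 files are supported.
            sys.exit("Reduce the number of pretraining epochs (99 max with PNIST07).")
        print('\n\tpretraining with PNIST07')
        optimizer.pretrain(datasets.PNIST07(min_file=0, max_file=nb_file),
                           decrease=dec)
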
@@ -145,9 +155,10 @@
         optimizer.reload_parameters('params_pretrain.txt')
         optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr)
     if finetune_choice == 4:
-        print ('\n\n\tFinetune with PNIST07\n\n')
+        print ('\n\n\tFinetune with PNIST07 then NIST\n\n')
         optimizer.reload_parameters('params_pretrain.txt')
-        optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=2,decrease=decrease_lr,dataset_test2=datasets.nist_P07())    
+        optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=30,decrease=decrease_lr,dataset_test2=datasets.nist_P07())    
+        optimizer.finetune(datasets.nist_all(),datasets.PNIST07(),max_finetune_epoch_NIST,ind_test=31,decrease=decrease_lr,dataset_test2=datasets.nist_P07())    
         
     if finetune_choice==-1:
         print('\nSERIES OF 4 DIFFERENT FINETUNINGS')
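
The improved option 4 chains two finetuning stages without reloading the pretrained weights in between: the model is first finetuned on PNIST07 with NIST as the test set (ind_test=30), then the stage-1 weights are finetuned further on NIST with PNIST07 as the test set (ind_test=31), keeping nist_P07 as a second test set in both stages. A sketch of that sequence, assuming ind_test is just an identifier tagging each stage's test output (the helper name is hypothetical):

    def finetune_pnist07_then_nist(optimizer, datasets, max_epochs, decrease_lr):
        # Stage 1: start from the pretrained weights, train on PNIST07,
        # evaluate on NIST (and on P07 as a second test set).
        optimizer.reload_parameters('params_pretrain.txt')
        optimizer.finetune(datasets.PNIST07(), datasets.nist_all(), max_epochs,
                           ind_test=30, decrease=decrease_lr,
                           dataset_test2=datasets.nist_P07())
        # Stage 2: continue from the stage-1 weights (no reload), train on NIST,
        # evaluate on PNIST07 (and again on P07).
        optimizer.finetune(datasets.nist_all(), datasets.PNIST07(), max_epochs,
                           ind_test=31, decrease=decrease_lr,
                           dataset_test2=datasets.nist_P07())
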
@@ -176,6 +187,9 @@
         sys.stdout.flush()
         optimizer.reload_parameters('params_pretrain.txt')
         optimizer.finetune(datasets.PNIST07(),datasets.nist_all(),max_finetune_epoch_NIST,ind_test=2,decrease=decrease_lr,dataset_test2=datasets.nist_P07())    
+        channel.save()
+        sys.stdout.flush()
+        optimizer.finetune(datasets.nist_all(),datasets.PNIST07(),max_finetune_epoch_NIST,ind_test=31,decrease=decrease_lr,dataset_test2=datasets.nist_P07())    
         
     
     channel.save()
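
In the serial branch above, `channel.save()` and `sys.stdout.flush()` are placed between the two long finetuning runs so the results of the first stage are persisted and logged even if the second is interrupted. The pattern, as a small hypothetical helper (assuming `channel` is a Jobman-style experiment channel whose save() persists the job state):

    import sys

    def run_stages(channel, stages):
        # Run each long finetuning stage, then checkpoint the job state and
        # flush logs before moving on, mirroring the calls in the diff.
        for stage in stages:
            stage()
            channel.save()
            sys.stdout.flush()

Each finetune call above could then be wrapped as a zero-argument lambda and passed in `stages`.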