changeset 456:66b05c6077c7

Ajout d'option pour choisir l'ensemble de données (train/valid/test) ainsi que mettre NIST, P07 et PNIST07 pour tous les modèles testés
author SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date Wed, 26 May 2010 20:24:16 -0400
parents 09e1c5872c2b
children 78ed4628071d
files deep/stacked_dae/v_sylvain/train_error.py
diffstat 1 files changed, 55 insertions(+), 31 deletions(-) [+]
line wrap: on
line diff
--- a/deep/stacked_dae/v_sylvain/train_error.py	Wed May 26 20:23:02 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/train_error.py	Wed May 26 20:24:16 2010 -0400
@@ -7,6 +7,7 @@
 import numpy 
 import theano
 import time
+import math
 
 import pylearn.version
 import theano.tensor as T
@@ -37,7 +38,7 @@
 Function called by jobman upon launching each job
 Its path is the one given when inserting jobs: see EXPERIMENT_PATH
 '''
-def jobman_entrypoint(state, channel):
+def jobman_entrypoint(state, channel,set_choice):
     # record mercurial versions of each package
     pylearn.version.record_versions(state,[theano,ift6266,pylearn])
     # TODO: remove this, bad for number of simultaneous requests on DB
@@ -57,7 +58,10 @@
     examples_per_epoch = NIST_ALL_TRAIN_SIZE
 
     PATH = ''
-    maximum_exemples=int(500000) #Maximum number of exemples seen
+    if set_choice == 0:
+        maximum_exemples=int(500000) #Maximum number of exemples seen
+    else:
+        maximum_exemples = int(1000000000)  #an impossible number
 
 
 
@@ -71,57 +75,68 @@
 
 
     
-    
 
     if os.path.exists(PATH+'params_finetune_NIST.txt'):
         print ('\n finetune = NIST ')
         optimizer.reload_parameters(PATH+'params_finetune_NIST.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
         
     
     if os.path.exists(PATH+'params_finetune_P07.txt'):
         print ('\n finetune = P07 ')
         optimizer.reload_parameters(PATH+'params_finetune_P07.txt')
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
 
     
     if os.path.exists(PATH+'params_finetune_NIST_then_P07.txt'):
         print ('\n finetune = NIST then P07')
         optimizer.reload_parameters(PATH+'params_finetune_NIST_then_P07.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
     
     if os.path.exists(PATH+'params_finetune_P07_then_NIST.txt'):
         print ('\n finetune = P07 then NIST')
         optimizer.reload_parameters(PATH+'params_finetune_P07_then_NIST.txt')
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
     
     if os.path.exists(PATH+'params_finetune_PNIST07.txt'):
         print ('\n finetune = PNIST07')
         optimizer.reload_parameters(PATH+'params_finetune_PNIST07.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the PNIST07 training set: "
-        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
         
     if os.path.exists(PATH+'params_finetune_PNIST07_then_NIST.txt'):
         print ('\n finetune = PNIST07 then NIST')
         optimizer.reload_parameters(PATH+'params_finetune_PNIST07_then_NIST.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the PNIST07 training set: "
-        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
     
     channel.save()
 
@@ -130,9 +145,18 @@
 
 
 if __name__ == '__main__':
+    
+    args = sys.argv[1:]
+
+    type = 0
+    if len(args) > 0 and args[0] == 'train':
+        type = 0
+    elif len(args) > 0 and args[0] == 'valid':
+        type = 1
+    elif len(args) > 0 and args[0] == 'test':
+        type = 2
+        
+    chanmock = DD({'COMPLETE':0,'save':(lambda:None)})
+    jobman_entrypoint(DD(DEFAULT_HP_NIST), chanmock, type)
 
 
-    chanmock = DD({'COMPLETE':0,'save':(lambda:None)})
-    jobman_entrypoint(DD(DEFAULT_HP_NIST), chanmock)
-
-