diff deep/stacked_dae/v_sylvain/train_error.py @ 459:5ead24fd4d49

merge
author Yoshua Bengio <bengioy@iro.umontreal.ca>
date Thu, 27 May 2010 08:29:26 -0600
parents 66b05c6077c7
children
--- a/deep/stacked_dae/v_sylvain/train_error.py	Thu May 27 08:29:04 2010 -0600
+++ b/deep/stacked_dae/v_sylvain/train_error.py	Thu May 27 08:29:26 2010 -0600
@@ -7,6 +7,7 @@
 import numpy 
 import theano
 import time
+import math
 
 import pylearn.version
 import theano.tensor as T
@@ -37,7 +38,7 @@
 Function called by jobman upon launching each job
 Its path is the one given when inserting jobs: see EXPERIMENT_PATH
 '''
-def jobman_entrypoint(state, channel):
+def jobman_entrypoint(state, channel,set_choice):
     # record mercurial versions of each package
     pylearn.version.record_versions(state,[theano,ift6266,pylearn])
     # TODO: remove this, bad for number of simultaneous requests on DB
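
The entry point now takes a third argument selecting which split of each dataset the error is computed on. Judging from the command-line handling added at the bottom of this patch, the intended mapping is 0 = train, 1 = valid, 2 = test; the table below only illustrates that assumption and is not code from the module.

    # Illustration only: assumed meaning of the new set_choice argument,
    # inferred from the __main__ block added further down in this patch.
    SET_CHOICE_LABELS = {0: 'train', 1: 'valid', 2: 'test'}
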
@@ -57,7 +58,10 @@
     examples_per_epoch = NIST_ALL_TRAIN_SIZE
 
     PATH = ''
-    maximum_exemples=int(500000) #Maximum number of exemples seen
+    if set_choice == 0:
+        maximum_exemples=int(500000) #Maximum number of examples seen
+    else:
+        maximum_exemples = int(1000000000)  #larger than any dataset, so effectively no cap
 
 
 
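
Note that the 500,000-example cap now only applies when set_choice selects the training split; for the other choices the sentinel of 10^9 exceeds the size of every dataset used here, so the whole split is scanned. A minimal sketch of the same logic with a named sentinel (the constant name is hypothetical, not from the patch):

    # Sketch only: equivalent cap selection with a named sentinel value.
    NO_CAP = int(1e9)  # larger than any of the datasets, i.e. effectively unlimited
    maximum_exemples = int(500000) if set_choice == 0 else NO_CAP
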
@@ -71,57 +75,68 @@
 
 
     
-    
 
     if os.path.exists(PATH+'params_finetune_NIST.txt'):
         print ('\n finetune = NIST ')
         optimizer.reload_parameters(PATH+'params_finetune_NIST.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
+        print "For " + str(maximum_exemples) + " over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
         
     
     if os.path.exists(PATH+'params_finetune_P07.txt'):
         print ('\n finetune = P07 ')
         optimizer.reload_parameters(PATH+'params_finetune_P07.txt')
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
+        print "For " + str(maximum_exemples) + " over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
 
     
     if os.path.exists(PATH+'params_finetune_NIST_then_P07.txt'):
         print ('\n finetune = NIST then P07')
         optimizer.reload_parameters(PATH+'params_finetune_NIST_then_P07.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
+        print "For " + str(maximum_exemples) + " over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
     
     if os.path.exists(PATH+'params_finetune_P07_then_NIST.txt'):
         print ('\n finetune = P07 then NIST')
         optimizer.reload_parameters(PATH+'params_finetune_P07_then_NIST.txt')
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
+        print "For " + str(maximum_exemples) + " over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
     
     if os.path.exists(PATH+'params_finetune_PNIST07.txt'):
         print ('\n finetune = PNIST07')
         optimizer.reload_parameters(PATH+'params_finetune_PNIST07.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the PNIST07 training set: "
-        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples))
+        print "For " + str(maximum_exemples) + " over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
         
     if os.path.exists(PATH+'params_finetune_PNIST07_then_NIST.txt'):
         print ('\n finetune = PNIST07 then NIST')
         optimizer.reload_parameters(PATH+'params_finetune_PNIST07_then_NIST.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the PNIST07 training set: "
-        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples))
+        print "For " + str(maximum_exemples) + " over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For " + str(maximum_exemples) + " over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
     
     channel.save()
 
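
Every reloaded set of finetuned parameters is now evaluated on the same three datasets, so the six nearly identical blocks above could be driven by one small helper. A hedged sketch, reusing the names that appear in this file (datasets.nist_all, datasets.nist_P07, datasets.PNIST07, optimizer.training_error); the helper itself is not part of the patch.

    # Sketch only: evaluate one reloaded model on all three datasets.
    def report_errors(optimizer, datasets, maximum_exemples, set_choice):
        suites = [('NIST', datasets.nist_all),
                  ('P07', datasets.nist_P07),
                  ('PNIST07', datasets.PNIST07)]
        for name, build in suites:
            print "For " + str(maximum_exemples) + " over the " + name + " set: "
            optimizer.training_error(build(maxsize=maximum_exemples), set_choice)
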
@@ -130,9 +145,18 @@
 
 
 if __name__ == '__main__':
+    
+    args = sys.argv[1:]
+
+    type = 0
+    if len(args) > 0 and args[0] == 'train':
+        type = 0
+    elif len(args) > 0 and args[0] == 'valid':
+        type = 1
+    elif len(args) > 0 and args[0] == 'test':
+        type = 2
+        
+    chanmock = DD({'COMPLETE':0,'save':(lambda:None)})
+    jobman_entrypoint(DD(DEFAULT_HP_NIST), chanmock, type)
 
 
-    chanmock = DD({'COMPLETE':0,'save':(lambda:None)})
-    jobman_entrypoint(DD(DEFAULT_HP_NIST), chanmock)
-
-
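
With this change the script accepts an optional positional argument choosing which split the errors are computed on: train (set_choice 0, capped at 500,000 examples), valid (1) or test (2); anything else, or no argument, falls back to the training split. The if/elif chain above could equally be written as a lookup table; a minimal sketch under the same assumptions, not code from the patch:

    # Sketch only: same command-line mapping as the __main__ block above.
    import sys
    SET_CHOICES = {'train': 0, 'valid': 1, 'test': 2}
    set_choice = SET_CHOICES.get(sys.argv[1], 0) if len(sys.argv) > 1 else 0

Typical invocations would then be "python train_error.py train", "python train_error.py valid" or "python train_error.py test".
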