comparison deep/stacked_dae/v_sylvain/train_error.py @ 456:66b05c6077c7

Add an option to choose the data set (train/valid/test), and evaluate NIST, P07 and PNIST for all tested models
author SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date Wed, 26 May 2010 20:24:16 -0400
parents 0d97fead004f
children
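
With this change the script accepts an optional command-line argument naming the evaluation split; the mapping to the integer set_choice flag is visible in the new __main__ block at the end of the diff below. A possible invocation (illustrative only; it assumes a Python 2 environment with the project's dependencies and the saved params_finetune_*.txt files in place):

    python deep/stacked_dae/v_sylvain/train_error.py valid    # 'train' (default), 'valid' or 'test'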
--- a/deep/stacked_dae/v_sylvain/train_error.py	455:09e1c5872c2b
+++ b/deep/stacked_dae/v_sylvain/train_error.py	456:66b05c6077c7
@@ -5,10 +5,11 @@
 import pylearn
 
 import numpy
 import theano
 import time
+import math
 
 import pylearn.version
 import theano.tensor as T
 from theano.tensor.shared_randomstreams import RandomStreams
 
@@ -35,11 +36,11 @@
 
 '''
 Function called by jobman upon launching each job
 Its path is the one given when inserting jobs: see EXPERIMENT_PATH
 '''
-def jobman_entrypoint(state, channel):
+def jobman_entrypoint(state, channel,set_choice):
     # record mercurial versions of each package
     pylearn.version.record_versions(state,[theano,ift6266,pylearn])
     # TODO: remove this, bad for number of simultaneous requests on DB
     channel.save()
 
@@ -55,11 +56,14 @@
     n_outs = 62 # 10 digits, 26*2 (lower, capitals)
 
     examples_per_epoch = NIST_ALL_TRAIN_SIZE
 
     PATH = ''
-    maximum_exemples=int(500000) #Maximum number of exemples seen
+    if set_choice == 0:
+        maximum_exemples=int(500000) #Maximum number of exemples seen
+    else:
+        maximum_exemples = int(1000000000) #an impossible number
 
 
 
     print "Creating optimizer with state, ", state
 
@@ -69,70 +73,90 @@
                                 examples_per_epoch=examples_per_epoch, \
                                 max_minibatches=rtt)
 
 
 
-
 
     if os.path.exists(PATH+'params_finetune_NIST.txt'):
         print ('\n finetune = NIST ')
         optimizer.reload_parameters(PATH+'params_finetune_NIST.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
 
 
     if os.path.exists(PATH+'params_finetune_P07.txt'):
         print ('\n finetune = P07 ')
         optimizer.reload_parameters(PATH+'params_finetune_P07.txt')
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
 
 
     if os.path.exists(PATH+'params_finetune_NIST_then_P07.txt'):
         print ('\n finetune = NIST then P07')
         optimizer.reload_parameters(PATH+'params_finetune_NIST_then_P07.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
 
     if os.path.exists(PATH+'params_finetune_P07_then_NIST.txt'):
         print ('\n finetune = P07 then NIST')
         optimizer.reload_parameters(PATH+'params_finetune_P07_then_NIST.txt')
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
 
     if os.path.exists(PATH+'params_finetune_PNIST07.txt'):
         print ('\n finetune = PNIST07')
         optimizer.reload_parameters(PATH+'params_finetune_PNIST07.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the PNIST07 training set: "
-        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
 
     if os.path.exists(PATH+'params_finetune_PNIST07_then_NIST.txt'):
         print ('\n finetune = PNIST07 then NIST')
         optimizer.reload_parameters(PATH+'params_finetune_PNIST07_then_NIST.txt')
-        print "For" + str(maximum_exemples) + "over the NIST training set: "
-        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the P07 training set: "
-        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
-        print "For" + str(maximum_exemples) + "over the PNIST07 training set: "
-        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the NIST set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the P07 set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
+        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
 
     channel.save()
 
     return channel.COMPLETE
 
 
 
 if __name__ == '__main__':
+
+    args = sys.argv[1:]
+
+    type = 0
+    if len(args) > 0 and args[0] == 'train':
+        type = 0
+    elif len(args) > 0 and args[0] == 'valid':
+        type = 1
+    elif len(args) > 0 and args[0] == 'test':
+        type = 2
+
+    chanmock = DD({'COMPLETE':0,'save':(lambda:None)})
+    jobman_entrypoint(DD(DEFAULT_HP_NIST), chanmock, type)
 
 
-    chanmock = DD({'COMPLETE':0,'save':(lambda:None)})
-    jobman_entrypoint(DD(DEFAULT_HP_NIST), chanmock)
-
-
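
The body of training_error is not part of this changeset (it lives in the stacked-DAE optimizer class), so the diff only shows it now receiving set_choice. The sketch below is a guess at the kind of dispatch that flag enables, using the 0=train, 1=valid, 2=test mapping from the new __main__ block; the split-iterator methods and the per-minibatch error callable are assumptions made for illustration, not the project's actual API.

    import numpy

    # Hypothetical sketch, not code from this changeset: compute the mean error
    # on one split of a dataset, selected by the integer set_choice flag.
    def error_on_split(dataset, set_choice, minibatch_error, batch_size=100):
        # assumed: the dataset object exposes train/valid/test minibatch iterators
        splits = {0: dataset.train, 1: dataset.valid, 2: dataset.test}
        errors = [minibatch_error(x, y) for x, y in splits[set_choice](batch_size)]
        return numpy.mean(errors)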