deep/stacked_dae/v_guillaume/train_error.py @ 436:0ca069550abd

Added: single-class version of SDA
author Guillaume Sicard <guitch21@gmail.com>
date Mon, 03 May 2010 06:14:05 -0400
#!/usr/bin/python
# coding: utf-8

import ift6266
import pylearn

import numpy
import theano
import time

import pylearn.version
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

import copy
import sys
import os
import os.path

from jobman import DD
import jobman, jobman.sql
from pylearn.io import filetensor

from utils import produit_cartesien_jobs
from copy import copy

from sgd_optimization import SdaSgdOptimizer

#from ift6266.utils.scalar_series import *
from ift6266.utils.seriestables import *
import tables

from ift6266 import datasets
from config import *
'''
Function called by jobman upon launching each job
Its path is the one given when inserting jobs: see EXPERIMENT_PATH
'''
def jobman_entrypoint(state, channel):
    # record mercurial versions of each package
    pylearn.version.record_versions(state, [theano, ift6266, pylearn])
    # TODO: remove this, bad for number of simultaneous requests on DB
    channel.save()

    # For test runs, we don't want to use the whole dataset, so
    # reduce it to fewer elements if asked to.
    rtt = None
    if state.has_key('reduce_train_to'):
        rtt = state['reduce_train_to']
    elif REDUCE_TRAIN_TO:
        rtt = REDUCE_TRAIN_TO

    n_ins = 32*32
    n_outs = 62 # 10 digits, 26*2 (lowercase, capitals)

    examples_per_epoch = NIST_ALL_TRAIN_SIZE

    PATH = ''
    maximum_exemples = int(500000) # Maximum number of examples seen

    print "Creating optimizer with state, ", state

    optimizer = SdaSgdOptimizer(dataset=datasets.nist_all(),
                                hyperparameters=state,
                                n_ins=n_ins, n_outs=n_outs,
                                examples_per_epoch=examples_per_epoch,
                                max_minibatches=rtt)

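    # Compute the training error on NIST and/or P07 for each finetuning
    # scenario whose saved parameters are found on disk.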
    if os.path.exists(PATH+'params_finetune_NIST.txt'):
        print ('\n finetune = NIST ')
        optimizer.reload_parameters(PATH+'params_finetune_NIST.txt')
        print "For " + str(maximum_exemples) + " over the NIST training set: "
        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))

    if os.path.exists(PATH+'params_finetune_P07.txt'):
        print ('\n finetune = P07 ')
        optimizer.reload_parameters(PATH+'params_finetune_P07.txt')
        print "For " + str(maximum_exemples) + " over the P07 training set: "
        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))

    if os.path.exists(PATH+'params_finetune_NIST_then_P07.txt'):
        print ('\n finetune = NIST then P07')
        optimizer.reload_parameters(PATH+'params_finetune_NIST_then_P07.txt')
        print "For " + str(maximum_exemples) + " over the NIST training set: "
        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
        print "For " + str(maximum_exemples) + " over the P07 training set: "
        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))

    if os.path.exists(PATH+'params_finetune_P07_then_NIST.txt'):
        print ('\n finetune = P07 then NIST')
        optimizer.reload_parameters(PATH+'params_finetune_P07_then_NIST.txt')
        print "For " + str(maximum_exemples) + " over the P07 training set: "
        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
        print "For " + str(maximum_exemples) + " over the NIST training set: "
        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))

    channel.save()

    return channel.COMPLETE


if __name__ == '__main__':
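    # Standalone run: mimic the jobman channel with a no-op mock and use
    # the default NIST hyperparameters from config.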
    chanmock = DD({'COMPLETE':0, 'save':(lambda: None)})
    jobman_entrypoint(DD(DEFAULT_HP_NIST), chanmock)
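
# A hedged sketch (not part of this changeset) of how jobs that call
# jobman_entrypoint are typically inserted into the jobman DB, following
# the pattern of the sibling nist_sda.py scripts. JOB_VALS, JOBDB and
# EXPERIMENT_PATH are assumed to come from config; adapt to the actual
# names there before uncommenting.
#
#def jobman_insert_nist():
#    jobs = produit_cartesien_jobs(JOB_VALS)
#    db = jobman.sql.db(JOBDB)
#    for job in jobs:
#        job.update({jobman.sql.EXPERIMENT: EXPERIMENT_PATH})
#        jobman.sql.insert_dict(job, db)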