annotate deep/convolutional_dae/run_exp.py @ 331:c2331b8e4b89

Add a line that shrinks the finetune_lr for NIST when training on P07+NIST
author SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date Wed, 14 Apr 2010 10:17:33 -0400
parents 6eab220a7d70
children 69109e41983e
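
The summary above describes a change made elsewhere in the repository (the annotations of this file stop at rev 300). Purely as an illustration of the kind of adjustment it describes, and not the committed line itself, shrinking the fine-tuning learning rate for the NIST stage of a P07+NIST run might look like the sketch below; the variable names and the factor of 10 are hypothetical.

    # Hypothetical sketch only -- not the actual change from this commit.
    if pretrain_dataset == 'P07' and finetune_dataset == 'NIST':
        finetune_lr = finetune_lr / 10.  # smaller lr for the NIST fine-tuning pass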

from ift6266.deep.convolutional_dae.scdae import *

class dumb(object):
    # Minimal stand-in for jobman's state and channel objects so the script
    # can also be run on its own (see the __main__ block below).  go()
    # returns channel.COMPLETE, so provide that attribute here as well.
    COMPLETE = None

    def save(self):
        pass

def go(state, channel):
    from ift6266 import datasets
    from ift6266.deep.convolutional_dae.sgd_opt import sgd_opt
    import pylearn, theano, ift6266
    import pylearn.version
    import sys

    # params: bsize, pretrain_lr, train_lr, nfilts1, nfilts2, nfilts3, nfilts4,
    #         pretrain_rounds, noise, mlp_sz

    # Record the code versions used for this run in the job state.
    pylearn.version.record_versions(state, [theano, ift6266, pylearn])
    # TODO: maybe record pynnet version?
    channel.save()

    dset = datasets.nist_all()

    # Build the list of per-layer filter counts; layers with a count of 0 are
    # simply left out, so these hyper-parameters also control the depth.
    nfilts = []
    if state.nfilts1 != 0:
        nfilts.append(state.nfilts1)
    if state.nfilts2 != 0:
        nfilts.append(state.nfilts2)
    if state.nfilts3 != 0:
        nfilts.append(state.nfilts3)
    if state.nfilts4 != 0:
        nfilts.append(state.nfilts4)

    fsizes = [(5, 5)] * len(nfilts)      # 5x5 convolution filters in every layer
    subs = [(2, 2)] * len(nfilts)        # 2x2 subsampling after every layer
    noise = [state.noise] * len(nfilts)  # same corruption level for every layer

    pretrain_funcs, trainf, evalf, net = build_funcs(
        img_size=(32, 32),
        batch_size=state.bsize,
        filter_sizes=fsizes,
        num_filters=nfilts,
        subs=subs,
        noise=noise,
        mlp_sizes=[state.mlp_sz],
        out_size=62,  # NIST has 62 classes: digits plus upper- and lower-case letters
        dtype=numpy.float32,
        pretrain_lr=state.pretrain_lr,
        train_lr=state.train_lr)

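    # Rough shape check for the defaults in __main__ (two layers of 4 filters,
    # 5x5 filters, 2x2 subsampling); the exact arithmetic depends on the scdae
    # implementation, so this trace is only an assumed illustration:
    #   input      32x32
    #   conv 5x5 -> 28x28, subsample 2x2 -> 14x14
    #   conv 5x5 -> 10x10, subsample 2x2 ->  5x5
    # i.e. 4 * 5 * 5 = 100 features feeding the mlp_sz-unit hidden layer.
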
    t_it = repeat_itf(dset.train, state.bsize)
    pretrain_fs, train, valid, test = massage_funcs(
        t_it, t_it, dset, state.bsize,
        pretrain_funcs, trainf, evalf)

    series = create_series()

    print "pretraining ..."
    sys.stdout.flush()
    do_pretrain(pretrain_fs, state.pretrain_rounds, series['recons_error'])

    print "training ..."
    sys.stdout.flush()
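    # The parameters below drive patience-based early stopping on the
    # validation error.  Sketch of the usual scheme (not the actual sgd_opt
    # code): validate every validation_frequency minibatches; when the
    # validation error drops below best_valid * improvement_threshold, record
    # the new best, measure the test error, and extend the patience to
    # patience_increase times the current iteration; stop once the iteration
    # count exceeds the patience or training_epochs is reached.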
    best_valid, test_score = sgd_opt(train, valid, test,
                                     training_epochs=1000000, patience=2500,
                                     patience_increase=2.,
                                     improvement_threshold=0.995,
                                     validation_frequency=500,
                                     series=series, net=net)
    state.best_valid = best_valid
    state.test_score = test_score
    channel.save()
    return channel.COMPLETE

if __name__ == '__main__':
    st = dumb()
    st.bsize = 100
    st.pretrain_lr = 0.01
    st.train_lr = 0.1
    st.nfilts1 = 4
    st.nfilts2 = 4
    st.nfilts3 = 0
    st.nfilts4 = 0  # go() reads all four nfilts*, so this must be set too
    st.pretrain_rounds = 500
    st.noise = 0.2
    st.mlp_sz = 500
    go(st, dumb())