Mercurial > ift6266
diff deep/stacked_dae/v_sylvain/train_error.py @ 388:0d97fead004f
Changement pour prendre en compte l'option finetune amelioree pour PNIST07
author | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca> |
---|---|
date | Tue, 27 Apr 2010 08:42:43 -0400 |
parents | 442fc117e886 |
children | 66b05c6077c7 |
line wrap: on
line diff
--- a/deep/stacked_dae/v_sylvain/train_error.py	Tue Apr 27 08:41:58 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/train_error.py	Tue Apr 27 08:42:43 2010 -0400
@@ -112,6 +112,16 @@
     optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
     print "For" + str(maximum_exemples) + "over the PNIST07 training set: "
     optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples))
+
+    if os.path.exists(PATH+'params_finetune_PNIST07_then_NIST.txt'):
+        print ('\n finetune = PNIST07 then NIST')
+        optimizer.reload_parameters(PATH+'params_finetune_PNIST07_then_NIST.txt')
+        print "For" + str(maximum_exemples) + "over the NIST training set: "
+        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the P07 training set: "
+        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples))
+        print "For" + str(maximum_exemples) + "over the PNIST07 training set: "
+        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples))
     channel.save()