comparison deep/stacked_dae/v_sylvain/nist_sda.py @ 354:ffc06af1c543
Added a feature to allow a decreasing learning rate during pretraining
author | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
---|---
date | Wed, 21 Apr 2010 14:54:54 -0400
parents | 4306796d60a8
children | 87e684bfe538
Comparing 353:bc4464c0894c (old line numbers, left) with 354:ffc06af1c543 (new line numbers, right); "+" marks inserted lines and "-" the lines they replace.

 53  53
 54  54       if state.has_key('decrease_lr'):
 55  55           decrease_lr = state['decrease_lr']
 56  56       else :
 57  57           decrease_lr = 0
     58  +
     59  +    if state.has_key('decrease_lr_pretrain'):
     60  +        dec=state['decrease_lr_pretrain']
     61  +    else :
     62  +        dec=0
 58  63
 59  64       n_ins = 32*32
 60  65       n_outs = 62 # 10 digits, 26*2 (lower, capitals)
 61  66
 62  67       examples_per_epoch = NIST_ALL_TRAIN_SIZE

 85  90       parameters=[]
 86  91       #Number of files of P07 used for pretraining
 87  92       nb_file=0
 88  93       if state['pretrain_choice'] == 0:
 89  94           print('\n\tpretraining with NIST\n')
 90      -        optimizer.pretrain(datasets.nist_all())
     95  +        optimizer.pretrain(datasets.nist_all(), decrease = dec)
 91  96       elif state['pretrain_choice'] == 1:
 92  97           #To know how many file will be used during pretraining
 93  98           nb_file = int(state['pretraining_epochs_per_layer'])
 94  99           state['pretraining_epochs_per_layer'] = 1 #Only 1 time over the dataset
 95 100           if nb_file >=100:
 96 101               sys.exit("The code does not support this much pretraining epoch (99 max with P07).\n"+
 97 102                        "You have to correct the code (and be patient, P07 is huge !!)\n"+
 98 103                        "or reduce the number of pretraining epoch to run the code (better idea).\n")
 99 104           print('\n\tpretraining with P07')
100      -        optimizer.pretrain(datasets.nist_P07(min_file=0,max_file=nb_file))
    105  +        optimizer.pretrain(datasets.nist_P07(min_file=0,max_file=nb_file),decrease = dec)
101 106       channel.save()
102 107
103 108       #Set some of the parameters used for the finetuning
104 109       if state.has_key('finetune_set'):
105 110           finetune_choice=state['finetune_set']
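The change reads an optional `decrease_lr_pretrain` key from the jobman state (defaulting to 0) and threads it into `optimizer.pretrain(..., decrease=dec)`. As a rough illustration of what such a parameter can do, the sketch below applies an inverse-time decay to the pretraining learning rate; the decay rule, the helper name `decayed_lr`, and the example values are assumptions made for illustration, not the optimizer's actual implementation.

```python
# Illustrative sketch only: one plausible way a `decrease` parameter like the
# `dec` value passed to optimizer.pretrain(..., decrease=dec) could shrink the
# pretraining learning rate over minibatches.  The inverse-time decay rule and
# the name `decayed_lr` are assumptions, not the repository's actual code.

def decayed_lr(base_lr, decrease, step):
    """Learning rate after `step` minibatches; decrease == 0 keeps it constant."""
    return base_lr / (1.0 + decrease * step)

if __name__ == "__main__":
    base_lr, dec = 0.01, 1e-3   # hypothetical values
    for step in (0, 100, 1000, 10000):
        print("step %5d -> lr %.6f" % (step, decayed_lr(base_lr, dec, step)))
```

With `decrease = 0`, which is the default used when `decrease_lr_pretrain` is absent from the state, such a schedule reduces to the previous constant-rate behaviour.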