diff deep/stacked_dae/nist_sda.py @ 191:3632e6258642
Minor additions to stacked_dae; just printed the time, I think.
| author | fsavard |
| --- | --- |
| date | Tue, 02 Mar 2010 14:47:18 -0500 |
| parents | d364a130b221 |
| children | e656edaedb48 |
--- a/deep/stacked_dae/nist_sda.py	Tue Mar 02 09:52:27 2010 -0500
+++ b/deep/stacked_dae/nist_sda.py	Tue Mar 02 14:47:18 2010 -0500
@@ -25,18 +25,13 @@

 from sgd_optimization import SdaSgdOptimizer

-SERIES_AVAILABLE = False
-try:
-    from scalar_series import *
-    SERIES_AVAILABLE = True
-except ImportError:
-    print "Could not import Series"
+from ift6266.utils.scalar_series import *

 TEST_CONFIG = False

 NIST_ALL_LOCATION = '/data/lisa/data/nist/by_class/all'

-JOBDB = 'postgres://ift6266h10@gershwin/ift6266h10_db/fsavard_sda2'
+JOBDB = 'postgres://ift6266h10@gershwin/ift6266h10_sandbox_db/fsavard_sda2'

 REDUCE_TRAIN_TO = None
 MAX_FINETUNING_EPOCHS = 1000
@@ -58,15 +53,15 @@
         'num_hidden_layers':[2,3]}

 # Just useful for tests... minimal number of epochs
-DEFAULT_HP_NIST = DD({'finetuning_lr':0.01,
-                      'pretraining_lr':0.01,
-                      'pretraining_epochs_per_layer':1,
-                      'max_finetuning_epochs':1,
-                      'hidden_layers_sizes':1000,
+DEFAULT_HP_NIST = DD({'finetuning_lr':0.1,
+                      'pretraining_lr':0.1,
+                      'pretraining_epochs_per_layer':20,
+                      'max_finetuning_epochs':2,
+                      'hidden_layers_sizes':300,
                       'corruption_levels':0.2,
                       'minibatch_size':20,
-                      'reduce_train_to':1000,
-                      'num_hidden_layers':1})
+                      #'reduce_train_to':300,
+                      'num_hidden_layers':2})

 def jobman_entrypoint(state, channel):
     pylearn.version.record_versions(state,[theano,ift6266,pylearn])
@@ -75,12 +70,10 @@
     workingdir = os.getcwd()

     print "Will load NIST"
-    sys.stdout.flush()

     nist = NIST(20)

     print "NIST loaded"
-    sys.stdout.flush()

     rtt = None
     if state.has_key('reduce_train_to'):
@@ -89,7 +82,7 @@
         rtt = REDUCE_TRAIN_TO

     if rtt:
-        print "Reducing training set to ", rtt, " examples"
+        print "Reducing training set to "+str( rtt)+ " examples"
         nist.reduce_train_set(rtt)

     train,valid,test = nist.get_tvt()
@@ -107,8 +100,9 @@
     # b,b',W for each hidden layer + b,W of last layer (logreg)
     numparams = nhl * 3 + 2
     series_mux = None
-    if SERIES_AVAILABLE:
-        series_mux = create_series(workingdir, numparams)
+    series_mux = create_series(workingdir, numparams)
+
+    print "Creating optimizer with state, ", state

     optimizer = SdaSgdOptimizer(dataset=dataset, hyperparameters=state, \
                                     n_ins=n_ins, n_outs=n_outs,\
@@ -275,7 +269,9 @@
         jobman_insert_nist()

     elif len(args) > 0 and args[0] == 'test_jobman_entrypoint':
-        chanmock = DD({'COMPLETE':0})
+        def f():
+            pass
+        chanmock = DD({'COMPLETE':0,'save':f})
         jobman_entrypoint(DEFAULT_HP_NIST, chanmock)

     elif len(args) > 0 and args[0] == 'estimate':
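The substantive fix here is the last hunk: `jobman_entrypoint(state, channel)` expects a jobman channel object, and the bare `DD({'COMPLETE':0})` mock used by the `test_jobman_entrypoint` path had no `save` member, so a `channel.save()` call inside the entrypoint would fail when run standalone. Below is a minimal sketch of that mock-channel pattern; it assumes `DD` is jobman's attribute-accessible dict (importable as `from jobman import DD`) and that `save` is the only channel method the entrypoint invokes, neither of which is confirmed by the diff alone.

```python
# Hypothetical sketch, not code from the repository: a stand-in jobman
# channel for exercising jobman_entrypoint() outside the jobman
# scheduler.  Assumes DD is jobman's dict subclass with attribute
# access, so chanmock.save resolves to the entry stored under 'save'.

from jobman import DD

def noop_save():
    # Stand-in for the real channel's save(), which persists the job
    # state back to the jobman database; here we deliberately do nothing.
    pass

chanmock = DD({'COMPLETE': 0, 'save': noop_save})

# Usage, with names taken from the diff:
#   jobman_entrypoint(DEFAULT_HP_NIST, chanmock)
# Any channel.save() call inside the entrypoint now reaches noop_save
# instead of failing on a missing attribute.
```

Stubbing only the members the entrypoint actually touches keeps the test run independent of the Postgres server named in `JOBDB`, which is the point of the `test_jobman_entrypoint` command.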