# HG changeset patch
# User xaviermuller
# Date 1270066015 14400
# Node ID 1e4bf5a5b46d08a9509390c9dbcdbbe15b8bf7ea
# Parent  ef28cbb5f4649750409e837d86d9d562775869c1
added type 2 adaptive learning configurable learning weight + versionning

diff -r ef28cbb5f464 -r 1e4bf5a5b46d baseline/mlp/mlp_nist.py
--- a/baseline/mlp/mlp_nist.py	Wed Mar 31 15:54:47 2010 -0400
+++ b/baseline/mlp/mlp_nist.py	Wed Mar 31 16:06:55 2010 -0400
@@ -31,7 +31,7 @@
 import time
 import theano.tensor.nnet
 import pylearn
-import theano,pylearn.version
+import theano,pylearn.version,ift6266
 from pylearn.io import filetensor as ft
 
 data_path = '/data/lisa/data/nist/by_class/'
@@ -163,7 +163,7 @@
     raise NotImplementedError()
 
 
-def mlp_full_nist( verbose = False,\
+def mlp_full_nist( verbose = 1,\
                    adaptive_lr = 0,\
                    train_data = 'all/all_train_data.ft',\
                    train_labels = 'all/all_train_labels.ft',\
@@ -176,7 +176,8 @@
                    batch_size=20,\
                    nb_hidden = 500,\
                    nb_targets = 62,
-                   tau=1e6):
+                   tau=1e6,\
+                   lr_t2_factor=0.5):
 
 
     configuration = [learning_rate,nb_max_exemples,nb_hidden,adaptive_lr]
@@ -217,7 +218,7 @@
     train_size*=batch_size
     validation_size =test_size
     offset = train_size-test_size
-    if verbose == True:
+    if verbose == 1:
         print 'train size = %d' %train_size
         print 'test size = %d' %test_size
         print 'valid size = %d' %validation_size
@@ -248,7 +249,7 @@
     y = T.lvector()  # the labels are presented as 1D vector of
                      # [long int] labels
 
-    if verbose==True:
+    if verbose==1:
         print 'finished parsing the data'
     # construct the logistic regression class
     classifier = MLP( input=x.reshape((batch_size,32*32)),\
@@ -325,7 +326,7 @@
 
 
 
-    if verbose == True:
+    if verbose == 1:
         print 'looping at most %d times through the data set' %n_iter
 
     for iter in xrange(n_iter* n_minibatches):
@@ -371,7 +372,7 @@
         if(this_train_loss
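
The diff is truncated before the hunk (around line 371 of mlp_nist.py) that actually uses the new lr_t2_factor parameter, so the exact update rule is not visible here. As a rough, non-authoritative sketch, a "type 2" adaptive learning-rate scheme of this kind is commonly implemented by multiplying the learning rate by the configured factor whenever the monitored loss stops improving. The helper below is a hypothetical illustration of that idea; the function name and the improvement test are assumptions, not code from this changeset.

def adapt_learning_rate_t2(learning_rate, this_loss, best_loss, lr_t2_factor=0.5):
    # Hypothetical sketch: keep the learning rate while the monitored loss is
    # still improving, otherwise decay it by the configurable lr_t2_factor.
    if this_loss < best_loss:
        return learning_rate, this_loss                  # improving: keep rate, record new best
    return learning_rate * lr_t2_factor, best_loss       # stalled: shrink rate, keep old best

In a training loop such a helper would be called once per evaluation point, for example:
learning_rate, best_loss = adapt_learning_rate_t2(learning_rate, this_train_loss, best_loss, lr_t2_factor)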