comparison deep/stacked_dae/mnist_sda.py @ 167:1f5937e9e530

More moves - transformations into data_generation, added "deep" folder
author Dumitru Erhan <dumitru.erhan@gmail.com>
date Fri, 26 Feb 2010 14:15:38 -0500
parents scripts/stacked_dae/mnist_sda.py@7d8366fb90bf
children 3632e6258642
#!/usr/bin/python
# coding: utf-8

# Parameterize call to sgd_optimization for MNIST

import numpy
import theano
import time
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

from sgd_optimization import SdaSgdOptimizer
import cPickle, gzip
from jobman import DD

MNIST_LOCATION = '/u/savardf/datasets/mnist.pkl.gz'

def sgd_optimization_mnist(learning_rate=0.1, pretraining_epochs=2,
                           pretrain_lr=0.1, training_epochs=5,
                           dataset='mnist.pkl.gz'):
    # Load the pickled dataset: train, valid, test splits (each with .x, .y).
    # Bind the result to a new name so it does not shadow the filename argument.
    f = gzip.open(dataset, 'rb')
    datasets = cPickle.load(f)
    f.close()

    n_ins = 28*28  # flattened 28x28 MNIST images
    n_outs = 10    # one output unit per digit class

    hyperparameters = DD({'finetuning_lr': learning_rate,
                          'pretraining_lr': pretrain_lr,
                          'pretraining_epochs_per_layer': pretraining_epochs,
                          'max_finetuning_epochs': training_epochs,
                          'hidden_layers_sizes': [100],
                          'corruption_levels': [0.2],
                          'minibatch_size': 20})
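    # Note: DD, as used here, is jobman's dict subclass that also permits
    # attribute access, so downstream code can read a setting either way,
    # e.g. hyperparameters.minibatch_size or hyperparameters['minibatch_size'].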

    optimizer = SdaSgdOptimizer(datasets, hyperparameters, n_ins, n_outs)
    optimizer.pretrain()
    optimizer.finetune()

if __name__ == '__main__':
    sgd_optimization_mnist(dataset=MNIST_LOCATION)
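
For reference, the stock mnist.pkl.gz distributed with the Deep Learning
Tutorials unpickles to three plain (x, y) tuples, while the comment in the
loader above expects attribute access (.x, .y). A minimal sketch of a wrapper
that would bridge the two layouts, assuming the tutorial pickle format (the
Split namedtuple and load_mnist helper are illustrative, not part of this
repository):

import cPickle, gzip
import collections

# One dataset split with attribute access: split.x, split.y
Split = collections.namedtuple('Split', ['x', 'y'])

def load_mnist(path):
    # Assumed layout: ((train_x, train_y), (valid_x, valid_y), (test_x, test_y))
    f = gzip.open(path, 'rb')
    train, valid, test = cPickle.load(f)
    f.close()
    return Split(*train), Split(*valid), Split(*test)

train, valid, test = load_mnist(MNIST_LOCATION)
print train.x.shape  # (50000, 784) for the tutorial pickle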