annotate deep/stacked_dae/v_guillaume/stacked_dae.py @ 451:227ebc0be7ae

Add a graph for the NIST training set and normalize the values.
author Arnaud Bergeron <abergeron@gmail.com>
date Mon, 10 May 2010 13:44:11 -0400
parents 0ca069550abd
children
#!/usr/bin/python
# coding: utf-8

import numpy
import theano
import time
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams
import copy

from utils import update_locals

# taken from LeDeepNet/daa.py
# has a special case when taking log(0) (defined = 0)
# modified to not take the mean anymore
from theano.tensor.xlogx import xlogx, xlogy0
# it's target*log(output)
def binary_cross_entropy(target, output, sum_axis=1):
    XE = xlogy0(target, output) + xlogy0((1 - target), (1 - output))
    return -T.sum(XE, axis=sum_axis)
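
# A minimal usage sketch (illustrative, not part of the original file):
# compile the symbolic cost and evaluate it on a toy minibatch. Because
# xlogy0 defines 0*log(0) = 0, a zero target coefficient against a zero
# output contributes 0 instead of nan.
#
#   t, o = T.matrix('t'), T.matrix('o')
#   xe = theano.function([t, o], binary_cross_entropy(t, o))
#   xe(numpy.array([[1., 0.]]), numpy.array([[0.9, 0.1]]))
#   # -> array([ 0.2107]), i.e. -(log(0.9) + log(0.9)) for this row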

class LogisticRegression(object):
    def __init__(self, input, n_in, n_out):
        # initialize the weights W with 0s, as a matrix of shape (n_in, n_out)
        self.W = theano.shared(value=numpy.zeros((n_in, n_out),
                                                 dtype=theano.config.floatX))
        # initialize the biases b as a vector of n_out 0s
        self.b = theano.shared(value=numpy.zeros((n_out,),
                                                 dtype=theano.config.floatX))
        # compute the vector of class-membership probabilities (row-wise).
        # A sigmoid (commented out below) could replace the softmax in order
        # to be able, later, to classify an input as "nothing".
        self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W) + self.b)
##        self.p_y_given_x = T.nnet.sigmoid(T.dot(input, self.W)+self.b)

        # compute the prediction as the class whose probability is maximal,
        # in symbolic form
        self.y_pred = T.argmax(self.p_y_given_x, axis=1)

        # list of parameters for this layer
        self.params = [self.W, self.b]


    def negative_log_likelihood(self, y):
        # [T.arange(y.shape[0]), y] picks, for each row of the minibatch, the
        # log-probability assigned to that row's correct class
        return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])
##        return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]),y]+T.sum(T.log(1-self.p_y_given_x), axis=1)-T.log(1-self.p_y_given_x)[T.arange(y.shape[0]),y])


##    def kullback_leibler(self,y):
##        return -T.mean(T.log(1/float(self.p_y_given_x))[T.arange(y.shape[0]),y])


    def errors(self, y):
        # check that y has the same dimension as y_pred
        if y.ndim != self.y_pred.ndim:
            raise TypeError('y should have the same shape as self.y_pred',
                            ('y', y.type, 'y_pred', self.y_pred.type))

        # check that y is of the correct datatype
        if y.dtype.startswith('int'):
            # the T.neq operator returns a vector of 0s and 1s, where 1
            # represents a mistake in prediction
            return T.mean(T.neq(self.y_pred, y))
        else:
            raise NotImplementedError()

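# Hedged usage sketch (names and sizes are illustrative, not from this file):
# wire the layer to symbolic inputs, then compile its cost and error rate.
#
#   x, y = T.matrix('x'), T.ivector('y')
#   clf = LogisticRegression(input=x, n_in=784, n_out=10)
#   eval_fn = theano.function([x, y],
#                             [clf.negative_log_likelihood(y), clf.errors(y)])
#   # eval_fn(minibatch_x, minibatch_y) -> [nll, error_rate]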

class SigmoidalLayer(object):
    def __init__(self, rng, input, n_in, n_out):
        self.input = input

        W_values = numpy.asarray(rng.uniform(
                low=-numpy.sqrt(6. / (n_in + n_out)),
                high=numpy.sqrt(6. / (n_in + n_out)),
                size=(n_in, n_out)), dtype=theano.config.floatX)
        self.W = theano.shared(value=W_values)

        b_values = numpy.zeros((n_out,), dtype=theano.config.floatX)
        self.b = theano.shared(value=b_values)

        self.output = T.nnet.sigmoid(T.dot(input, self.W) + self.b)
        self.params = [self.W, self.b]


class TanhLayer(object):
    def __init__(self, rng, input, n_in, n_out):
        self.input = input

        W_values = numpy.asarray(rng.uniform(
                low=-numpy.sqrt(6. / (n_in + n_out)),
                high=numpy.sqrt(6. / (n_in + n_out)),
                size=(n_in, n_out)), dtype=theano.config.floatX)
        self.W = theano.shared(value=W_values)

        b_values = numpy.zeros((n_out,), dtype=theano.config.floatX)
        self.b = theano.shared(value=b_values)

        self.output = (T.tanh(T.dot(input, self.W) + self.b) + 1.0) / 2.0
        # The (... + 1)/2 is because tanh ranges over [-1, 1] while sigmoid
        # ranges over [0, 1]: tanh is wanted here, but the image of the
        # activation has to stay the same, so the correction is necessary.
        self.params = [self.W, self.b]

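# A quick identity behind the rescaling above (numerically checkable):
# (tanh(a) + 1)/2 == sigmoid(2a), so TanhLayer is a sigmoid unit with twice
# the slope but the same [0, 1] image.
#
#   a = numpy.linspace(-3., 3., 7)
#   numpy.allclose((numpy.tanh(a) + 1.)/2., 1./(1. + numpy.exp(-2.*a)))  # True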

class dA(object):
    def __init__(self, n_visible=784, n_hidden=500, corruption_level=0.1,
                 input=None, shared_W=None, shared_b=None):
        self.n_visible = n_visible
        self.n_hidden = n_hidden

        # create a Theano random generator that gives symbolic random values
        theano_rng = RandomStreams()

        if shared_W is not None and shared_b is not None:
            self.W = shared_W
            self.b = shared_b
        else:
            # initial values for weights and biases
            # note : W' was written as `W_prime` and b' as `b_prime`

            # W is initialized with `initial_W`, which is uniformly sampled
            # from -6./sqrt(n_visible+n_hidden) to 6./sqrt(n_hidden+n_visible);
            # the output of uniform is converted using asarray to dtype
            # theano.config.floatX so that the code is runnable on GPU
            initial_W = numpy.asarray(numpy.random.uniform(
                    low=-numpy.sqrt(6. / (n_hidden + n_visible)),
                    high=numpy.sqrt(6. / (n_hidden + n_visible)),
                    size=(n_visible, n_hidden)), dtype=theano.config.floatX)
            initial_b = numpy.zeros(n_hidden, dtype=theano.config.floatX)

            # theano shared variables for weights and biases
            self.W = theano.shared(value=initial_W, name="W")
            self.b = theano.shared(value=initial_b, name="b")

        initial_b_prime = numpy.zeros(n_visible)
        # tied weights, therefore W_prime is W transpose
        self.W_prime = self.W.T
        self.b_prime = theano.shared(value=initial_b_prime, name="b'")

        # if no input is given, generate a variable representing the input
        if input is None:
            # we use a matrix because we expect a minibatch of several
            # examples, each example being a row
            self.x = T.matrix(name='input')
        else:
            self.x = input
        # Equation (1)
        # keep 90% of the inputs the same and zero out a randomly selected
        # subset of 10% of the inputs (for the default corruption_level of 0.1)
        # note : the first argument of theano_rng.binomial is the shape (size)
        #        of the random numbers that it should produce,
        #        the second argument is the number of trials,
        #        the third argument is the probability of success of any trial
        #
        #        this produces an array of 0s and 1s where 1 has a probability
        #        of 1 - ``corruption_level`` and 0 has a probability of
        #        ``corruption_level``
        self.tilde_x = theano_rng.binomial(self.x.shape, 1, 1 - corruption_level,
                                           dtype=theano.config.floatX) * self.x
        # Equation (2)
        # note : y is stored as an attribute of the class so that it can be
        #        used later when stacking dAs.

##        self.y = T.nnet.sigmoid(T.dot(self.tilde_x, self.W) + self.b)
##
##        # Equation (3)
##        #self.z = T.nnet.sigmoid(T.dot(self.y, self.W_prime) + self.b_prime)
##        # Equation (4)
##        # note : we sum over the size of a datapoint; if we are using
##        #        minibatches, L will be a vector, with one entry per example
##        #        in the minibatch
##        #self.L = - T.sum( self.x*T.log(self.z) + (1-self.x)*T.log(1-self.z), axis=1 )
##        #self.L = binary_cross_entropy(target=self.x, output=self.z, sum_axis=1)
##
##        # bypassing z to avoid running into log(0)
##        z_a = T.dot(self.y, self.W_prime) + self.b_prime
##        log_sigmoid = T.log(1.) - T.log(1. + T.exp(-z_a))
##        # log(1 - sigmoid(z_a))
##        log_1_sigmoid = -z_a - T.log(1. + T.exp(-z_a))
##        self.L = -T.sum( self.x * (log_sigmoid) \
##                        + (1.0 - self.x) * (log_1_sigmoid), axis=1 )

        # I added this epsilon to avoid getting log(0) and 1/0 in grad.
        # Conceptually this means there'd be no probability of exactly 0,
        # but that doesn't seem important to me (maybe I'm wrong?).
        #eps = 0.00000001
        #eps_1 = 1 - eps
        #self.L = - T.sum( self.x * T.log(eps + eps_1*self.z) \
        #                + (1 - self.x)*T.log(eps + eps_1*(1 - self.z)), axis=1 )
        # note : L is now a vector, where each element is the cross-entropy
        #        cost of the reconstruction of the corresponding example of
        #        the minibatch. We need to compute the average of all these
        #        to get the cost of the minibatch

        # Alternatively, use a tanh rescaled so everything stays between 0
        # and 1, i.e. the same range as when a sigmoid is used
        self.y = (T.tanh(T.dot(self.tilde_x, self.W) + self.b) + 1.0) / 2.0

        self.z = (T.tanh(T.dot(self.y, self.W_prime) + self.b_prime) + 1.0) / 2.0
        # clip z away from 0 and 1 so the cross-entropy below never takes
        # log(0); this must stay symbolic (a Python-level `if` cannot branch
        # on the value of a Theano tensor)
        self.z = T.clip(self.z, 0.000001, 0.999999)

        self.L = - T.sum(self.x * T.log(self.z)
                         + (1.0 - self.x) * T.log(1.0 - self.z), axis=1)

        self.cost = T.mean(self.L)

        self.params = [self.W, self.b, self.b_prime]

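# Hedged sketch of pretraining a stand-alone dA (the learning rate and sizes
# are illustrative assumptions, not values from this file):
#
#   x = T.matrix('x')
#   da = dA(n_visible=784, n_hidden=500, corruption_level=0.1, input=x)
#   gparams = T.grad(da.cost, da.params)
#   updates = dict((p, p - 0.01 * g) for p, g in zip(da.params, gparams))
#   train_da = theano.function([x], da.cost, updates=updates)
#   # each call does one gradient step on a minibatch with rows in [0, 1]:
#   # avg_cost = train_da(minibatch_x)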

class SdA(object):
    def __init__(self, batch_size, n_ins,
                 hidden_layers_sizes, n_outs,
                 corruption_levels, rng, pretrain_lr, finetune_lr):
        # Just to make sure those are not modified somewhere else afterwards
        hidden_layers_sizes = copy.deepcopy(hidden_layers_sizes)
        corruption_levels = copy.deepcopy(corruption_levels)

        update_locals(self, locals())

        self.layers = []
        self.pretrain_functions = []
        self.params = []
        # MODIF: added this so we also get the b_primes
        # (not used for finetuning... still using ".params")
        self.all_params = []
        self.n_layers = len(hidden_layers_sizes)
        self.logistic_params = []

        print "Creating SdA with params:"
        print "batch_size", batch_size
        print "hidden_layers_sizes", hidden_layers_sizes
        print "corruption_levels", corruption_levels
        print "n_ins", n_ins
        print "n_outs", n_outs
        print "pretrain_lr", pretrain_lr
        print "finetune_lr", finetune_lr
        print "----"

        if len(hidden_layers_sizes) < 1:
            raise Exception('You must have at least one hidden layer')

        # allocate symbolic variables for the data
        #index = T.lscalar()     # index to a [mini]batch
        self.x = T.matrix('x')   # the data is presented as rasterized images
        self.y = T.ivector('y')  # the labels are presented as a 1D vector
                                 # of [int] labels
        self.finetune_lr = T.fscalar('finetune_lr')  # to get a dynamic finetune learning rate
        self.pretrain_lr = T.fscalar('pretrain_lr')  # to get a dynamic pretrain learning rate

        for i in xrange(self.n_layers):
            # construct the hidden layer

            # the size of the input is either the number of hidden units of
            # the layer below or the input size if we are on the first layer
            if i == 0:
                input_size = n_ins
            else:
                input_size = hidden_layers_sizes[i - 1]

            # the input to this layer is either the activation of the hidden
            # layer below or the input of the SdA if you are on the first
            # layer
            if i == 0:
                layer_input = self.x
            else:
                layer_input = self.layers[-1].output
            # We have to choose between a sigmoidal layer and a tanh layer!

##            layer = SigmoidalLayer(rng, layer_input, input_size,
##                                   hidden_layers_sizes[i] )

            layer = TanhLayer(rng, layer_input, input_size,
                              hidden_layers_sizes[i])
            # add the layer to the list of layers
            self.layers += [layer]
            self.params += layer.params

            # Construct a denoising autoencoder that shares weights with this
            # layer (note: every layer uses corruption_levels[0])
            dA_layer = dA(input_size, hidden_layers_sizes[i],
                          corruption_level=corruption_levels[0],
                          input=layer_input,
                          shared_W=layer.W, shared_b=layer.b)

            self.all_params += dA_layer.params

            # Construct a function that trains this dA:
            # compute gradients of the layer parameters
            gparams = T.grad(dA_layer.cost, dA_layer.params)
            # compute the list of updates
            updates = {}
            for param, gparam in zip(dA_layer.params, gparams):
                updates[param] = param - gparam * self.pretrain_lr

            # create a function that trains the dA
            update_fn = theano.function([self.x, self.pretrain_lr],
                                        dA_layer.cost, updates=updates)
            #                           givens = { self.x : ensemble })
            #update_fn = theano.function([index], dA_layer.cost, \
            #      updates = updates,
            #      givens = {
            #         self.x : train_set_x[index*batch_size:(index+1)*batch_size] / self.shared_divider})
            # collect this function into a list
            self.pretrain_functions += [update_fn]


        # We now need to add a logistic layer on top of the SdA
        self.logLayer = LogisticRegression(
            input=self.layers[-1].output,
            n_in=hidden_layers_sizes[-1], n_out=n_outs)

        self.params += self.logLayer.params
        self.all_params += self.logLayer.params
        # construct a function that implements one step of finetuning

        # compute the cost, defined as the negative log likelihood
        cost = self.logLayer.negative_log_likelihood(self.y)
        # compute the gradients with respect to the model parameters
        gparams = T.grad(cost, self.params)
        # compute the list of updates
        updates = {}
        for param, gparam in zip(self.params, gparams):
            updates[param] = param - gparam * self.finetune_lr

        self.finetune = theano.function([self.x, self.y, self.finetune_lr],
                                        cost, updates=updates)

        # symbolic variable that points to the number of errors made on the
        # minibatch given by self.x and self.y

        self.errors = self.logLayer.errors(self.y)


        # STRUCTURE FOR THE FINETUNING OF THE LOGISTIC REGRESSION ON THE TOP,
        # WITH ALL HIDDEN LAYERS AS INPUT

        all_h = []
        for i in xrange(self.n_layers):
            all_h.append(self.layers[i].output)
        self.all_hidden = T.concatenate(all_h, axis=1)


        self.logLayer2 = LogisticRegression(
            input=self.all_hidden,
            n_in=sum(hidden_layers_sizes), n_out=n_outs)
            #n_in=hidden_layers_sizes[0], n_out=n_outs)

        # construct a function that implements one step of finetuning

        self.logistic_params += self.logLayer2.params
        # compute the cost, defined as the negative log likelihood
        cost2 = self.logLayer2.negative_log_likelihood(self.y)
        # compute the gradients with respect to the model parameters
        gparams2 = T.grad(cost2, self.logistic_params)

        # compute the list of updates
        # (note: this uses the constructor argument `finetune_lr`, a fixed
        # value, not the symbolic self.finetune_lr, which is why finetune2
        # takes no learning-rate input)
        updates2 = {}
        for param, gparam in zip(self.logistic_params, gparams2):
            updates2[param] = param - gparam * finetune_lr

        self.finetune2 = theano.function([self.x, self.y], cost2,
                                         updates=updates2)

        # symbolic variable that points to the number of errors made on the
        # minibatch given by self.x and self.y

        self.errors2 = self.logLayer2.errors(self.y)


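# Hedged end-to-end sketch (the dataset iterators are assumptions; only the
# constructor signature and the compiled functions above are from this file):
#
#   rng = numpy.random.RandomState(1234)
#   sda = SdA(batch_size=20, n_ins=32*32, hidden_layers_sizes=[500, 500],
#             n_outs=10, corruption_levels=[0.1, 0.1], rng=rng,
#             pretrain_lr=0.01, finetune_lr=0.1)
#   # greedy layer-wise pretraining, then supervised finetuning:
#   # for pretrain_fn in sda.pretrain_functions:
#   #     for mb_x in train_batches:
#   #         pretrain_fn(mb_x, 0.01)
#   # for mb_x, mb_y in labelled_batches:
#   #     sda.finetune(mb_x, mb_y, 0.1)
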
if __name__ == '__main__':
    import sys
    args = sys.argv[1:]
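
    # Hedged smoke test (not part of the original file): build a tiny SdA and
    # run one pretraining step on random data; all sizes are illustrative.
    rng = numpy.random.RandomState(1234)
    sda = SdA(batch_size=10, n_ins=16, hidden_layers_sizes=[8], n_outs=2,
              corruption_levels=[0.1], rng=rng,
              pretrain_lr=0.01, finetune_lr=0.1)
    mb_x = numpy.random.uniform(size=(10, 16)).astype(theano.config.floatX)
    print "pretrain cost on a random minibatch:", sda.pretrain_functions[0](mb_x, 0.01)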