Mercurial > pylearn
comparison algorithms/daa.py @ 487:94a4c5b7293b
DAA code more generic:
Can now choose activation function and reconstruction cost.
author | Joseph Turian <turian@gmail.com> |
---|---|
date | Tue, 28 Oct 2008 02:21:50 -0400 |
parents | 5ccb1662f9f6 |
children | e06666ac32d5 |
comparison legend: equal | deleted | inserted | replaced
486:5ccb1662f9f6 | 487:94a4c5b7293b |
---|---|
2 import theano | 2 import theano |
3 from theano import tensor as T | 3 from theano import tensor as T |
4 from theano.tensor import nnet as NN | 4 from theano.tensor import nnet as NN |
5 import numpy as N | 5 import numpy as N |
6 | 6 |
7 from pylearn import cost as cost | |
8 | |
7 class DenoisingAA(T.RModule): | 9 class DenoisingAA(T.RModule): |
8 | 10 |
9 def __init__(self, input = None, regularize = True, tie_weights = True): | 11 def __init__(self, input = None, regularize = True, tie_weights = True, |
12 activation_function=NN.sigmoid, reconstruction_cost_function=cost.cross_entropy): | |
13 """ | |
14 @param reconstruction_cost: Should return one cost per example (row) | |
15 """ | |
10 super(DenoisingAA, self).__init__() | 16 super(DenoisingAA, self).__init__() |
11 | 17 |
12 # MODEL CONFIGURATION | 18 # MODEL CONFIGURATION |
13 self.regularize = regularize | 19 self.regularize = regularize |
14 self.tie_weights = tie_weights | 20 self.tie_weights = tie_weights |
21 self.activation_function = activation_function | |
22 self.reconstruction_cost_function = reconstruction_cost_function | |
15 | 23 |
16 # ACQUIRE/MAKE INPUT | 24 # ACQUIRE/MAKE INPUT |
17 if not input: | 25 if not input: |
18 input = T.matrix('input') | 26 input = T.matrix('input') |
19 self.input = theano.External(input) | 27 self.input = theano.External(input) |
112 if seed is not None: | 120 if seed is not None: |
113 obj.seed(seed) | 121 obj.seed(seed) |
114 obj.__hide__ = ['params'] | 122 obj.__hide__ = ['params'] |
115 | 123 |
116 def build_regularization(self): | 124 def build_regularization(self): |
125 """ | |
126 @todo: Why do we need this function? | |
127 """ | |
117 return T.zero() # no regularization! | 128 return T.zero() # no regularization! |
118 | 129 |
119 | 130 |
120 class SigmoidXEDenoisingAA(DenoisingAA): | 131 class SigmoidXEDenoisingAA(DenoisingAA): |
132 """ | |
133 @todo: Merge this into the above. | |
134 """ | |
121 | 135 |
122 def build_corrupted_input(self): | 136 def build_corrupted_input(self): |
123 self.noise_level = theano.Member(T.scalar()) | 137 self.noise_level = theano.Member(T.scalar()) |
124 return self.random.binomial(T.shape(self.input), 1, 1 - self.noise_level) * self.input | 138 return self.random.binomial(T.shape(self.input), 1, 1 - self.noise_level) * self.input |
125 | 139 |
126 def hid_activation_function(self, activation): | 140 def hid_activation_function(self, activation): |
127 return NN.sigmoid(activation) | 141 return self.activation_function(activation) |
128 | 142 |
129 def out_activation_function(self, activation): | 143 def out_activation_function(self, activation): |
130 return NN.sigmoid(activation) | 144 return self.activation_function(activation) |
131 | 145 |
132 def build_reconstruction_costs(self, output): | 146 def build_reconstruction_costs(self, output): |
133 reconstruction_cost_matrix = -(self.input * T.log(output) + (1 - self.input) * T.log(1 - output)) | 147 return self.reconstruction_cost_function(self.input, output) |
134 return T.sum(reconstruction_cost_matrix, axis=1) | |
135 | 148 |
136 def build_regularization(self): | 149 def build_regularization(self): |
137 self.l2_coef = theano.Member(T.scalar()) | 150 self.l2_coef = theano.Member(T.scalar()) |
138 if self.tie_weights: | 151 if self.tie_weights: |
139 return self.l2_coef * T.sum(self.w1 * self.w1) | 152 return self.l2_coef * T.sum(self.w1 * self.w1) |