algorithms/aa.py @ 476:8fcd0f3d9a17

added a few algorithms
author Olivier Breuleux <breuleuo@iro.umontreal.ca>
date Mon, 27 Oct 2008 17:26:00 -0400
import theano
from theano import tensor as T
from theano.tensor import nnet as NN
import numpy as N

class AutoEncoder(theano.FancyModule):

    def __init__(self, input = None, regularize = True, tie_weights = True):
        super(AutoEncoder, self).__init__()

        # MODEL CONFIGURATION
        self.regularize = regularize
        self.tie_weights = tie_weights

        # ACQUIRE/MAKE INPUT
        if input is None:
            input = T.matrix('input')
        self.input = theano.External(input)

        # HYPER-PARAMETERS
        self.lr = theano.Member(T.scalar())

        # PARAMETERS
        self.w1 = theano.Member(T.matrix())
        if not tie_weights:
            self.w2 = theano.Member(T.matrix())
        else:
            self.w2 = self.w1.T
        self.b1 = theano.Member(T.vector())
        self.b2 = theano.Member(T.vector())

        # HIDDEN LAYER
        self.hidden_activation = T.dot(input, self.w1) + self.b1
        self.hidden = self.build_hidden()

        # RECONSTRUCTION LAYER
        self.output_activation = T.dot(self.hidden, self.w2) + self.b2
        self.output = self.build_output()

        # RECONSTRUCTION COST
        self.reconstruction_cost = self.build_reconstruction_cost()

        # REGULARIZATION COST
        self.regularization = self.build_regularization()

        # TOTAL COST
        self.cost = self.reconstruction_cost
        if self.regularize:
            self.cost = self.cost + self.regularization

        # GRADIENTS AND UPDATES
        if self.tie_weights:
            self.params = self.w1, self.b1, self.b2
        else:
            self.params = self.w1, self.w2, self.b1, self.b2
        gradients = T.grad(self.cost, self.params)
        updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gradients))

        # INTERFACE METHODS
        self.update = theano.Method(input, self.cost, updates)
        self.reconstruction = theano.Method(input, self.output)
        self.representation = theano.Method(input, self.hidden)
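        # (each theano.Method above becomes a compiled callable on the module
        #  instance: update(x) applies the gradient updates and returns the cost,
        #  while reconstruction(x) and representation(x) only evaluate their outputs)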

    def _instance_initialize(self, obj, input_size = None, hidden_size = None, seed = None, **init):
        if (input_size is None) ^ (hidden_size is None):
            raise ValueError("Must specify both input_size and hidden_size or neither.")
        super(AutoEncoder, self)._instance_initialize(obj, **init)
        if seed is not None:
            R = N.random.RandomState(seed)
        else:
            R = N.random
        if input_size is not None:
            sz = (input_size, hidden_size)
            scale = 1.0 / N.sqrt(input_size)
            obj.w1 = R.uniform(size = sz, low = -scale, high = scale)
            if not self.tie_weights:
                obj.w2 = R.uniform(size = list(reversed(sz)), low = -scale, high = scale)
            obj.b1 = N.zeros(hidden_size)
            obj.b2 = N.zeros(input_size)

    def build_regularization(self):
        return T.zero()  # no regularization!

class SigmoidXEAutoEncoder(AutoEncoder):

    def build_hidden(self):
        return NN.sigmoid(self.hidden_activation)

    def build_output(self):
        return NN.sigmoid(self.output_activation)

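    # reconstruction cost below is the per-example binary cross-entropy between
    # the input and its sigmoid reconstruction, summed over features (axis=1)
    # and then over the minibatch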
    def build_reconstruction_cost(self):
        self.reconstruction_cost_matrix = self.input * T.log(self.output) + (1.0 - self.input) * T.log(1.0 - self.output)
        self.reconstruction_costs = -T.sum(self.reconstruction_cost_matrix, axis=1)
        return T.sum(self.reconstruction_costs)

    def build_regularization(self):
        self.l2_coef = theano.Member(T.scalar())
        # L2 penalty on the weights (only w1 is a free parameter when weights are tied)
        if self.tie_weights:
            return self.l2_coef * T.sum(self.w1 * self.w1)
        else:
            return self.l2_coef * (T.sum(self.w1 * self.w1) + T.sum(self.w2 * self.w2))

    def _instance_initialize(self, obj, input_size = None, hidden_size = None, **init):
        init.setdefault('l2_coef', 0)
        super(SigmoidXEAutoEncoder, self)._instance_initialize(obj, input_size, hidden_size, **init)
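
A minimal usage sketch, assuming the 2008-era theano Module interface in which module.make() compiles the declared Methods into callables and forwards its keyword arguments to _instance_initialize (the sizes, learning rate, and seed below are illustrative values, not part of the file):

import numpy

x = numpy.random.uniform(size=(20, 16))                 # toy batch of inputs in [0, 1)
model = SigmoidXEAutoEncoder(regularize = True, tie_weights = True)
m = model.make(input_size = 16, hidden_size = 4, lr = 0.01, seed = 123, l2_coef = 1e-4)
for i in range(100):
    cost = m.update(x)           # one gradient step on the batch, returns the total cost
codes = m.representation(x)      # hidden codes for the batch
recon = m.reconstruction(x)      # sigmoid reconstruction of the inputs

Since only the update Method carries the updates dictionary, representation and reconstruction evaluate the graph without touching the parameters.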