pylearn: annotate sandbox/rbm/model.py @ 419:43d9aa93934e
"added other_ops.py to nnet_ops; added basic tests, no docs."
author:  James Bergstra <bergstrj@iro.umontreal.ca>
date:    Mon, 14 Jul 2008 16:48:02 -0400
parents: 4f61201fa9a9
"""
The model for a basic restricted Boltzmann machine (RBM) over sparse inputs.
Adapted from the sparse_random_autoassociator, which used Ronan Collobert +
Jason Weston's sampling trick (2008).
"""

import parameters

import numpy
from numpy import dot
import random

import pylearn.nnet_ops
import pylearn.sparse_instance

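# parameters.Parameters is defined in parameters.py, which is not shown on this
# page. The sketch below is a hypothetical stand-in, inferred only from how the
# parameters are used in this file (fields w, b, c, with w of shape
# (input_dimension, hidden_dimension), b the hidden biases, c the visible
# biases); the real class may differ. The name ParametersSketch and the
# uniform initialization scale are assumptions made for illustration.
class ParametersSketch:
    def __init__(self, input_dimension, hidden_dimension, randomly_initialize=True, random_seed=666):
        numpy.random.seed(random_seed)
        if randomly_initialize:
            # Small random weights; the scale is arbitrary in this sketch.
            self.w = numpy.random.uniform(-0.1, 0.1, (input_dimension, hidden_dimension))
        else:
            self.w = numpy.zeros((input_dimension, hidden_dimension))
        self.b = numpy.zeros(hidden_dimension)  # hidden unit biases
        self.c = numpy.zeros(input_dimension)   # visible unit biases
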
def sigmoid(v):
    """
    @todo: Move to pylearn.more_numpy
    @todo: Fix to avoid floating point overflow.
    """
#    if x < -30.0: return 0.0
#    if x > 30.0: return 1.0
    return 1.0 / (1.0 + numpy.exp(-v))

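# A minimal sketch of one way to address the overflow @todo in sigmoid():
# clip the activation before exponentiating so numpy.exp never sees a very
# large magnitude. The threshold 30.0 (taken from the commented-out scalar
# checks above) and the name stable_sigmoid are assumptions, not part of the
# original code.
def stable_sigmoid(v, clip=30.0):
    """Elementwise logistic sigmoid that avoids floating point overflow."""
    v = numpy.clip(v, -clip, clip)
    return 1.0 / (1.0 + numpy.exp(-v))
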
def sample(v):
    """
    @todo: Move to pylearn.more_numpy
    """
    assert len(v.shape) == 2
    x = numpy.zeros(v.shape)
    for j in range(v.shape[0]):
        for i in range(v.shape[1]):
            assert v[j][i] >= 0 and v[j][i] <= 1
            if random.random() < v[j][i]: x[j][i] = 1
            else: x[j][i] = 0
    return x

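# A hedged, vectorized alternative to the elementwise loop in sample() above:
# draw one uniform number per entry and compare against the probabilities in a
# single numpy operation. It uses numpy.random rather than the stdlib random
# module used elsewhere in this file, so seeding behaviour differs; treat it
# as a sketch, not a drop-in replacement.
def sample_vectorized(v):
    """Sample a binary matrix whose entries are Bernoulli(v[j][i])."""
    assert len(v.shape) == 2
    assert numpy.all((v >= 0) & (v <= 1))
    return (numpy.random.random(v.shape) < v).astype(numpy.float64)
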
def crossentropy(output, target):
    """
    Compute the crossentropy of binary output wrt binary target.
    @note: We do not sum, crossentropy is computed by component.
    @todo: Rewrite as a scalar, and then broadcast to tensor.
    @todo: Move to pylearn.more_numpy
    @todo: Fix to avoid floating point overflow.
    """
    return -(target * numpy.log(output) + (1 - target) * numpy.log(1 - output))


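# A minimal sketch of one way to address the overflow @todo in crossentropy():
# clip the output probabilities away from exactly 0 and 1 before taking logs,
# so numpy.log never returns -inf. The epsilon value and the function name are
# assumptions for illustration.
def crossentropy_clipped(output, target, eps=1e-12):
    """Componentwise binary crossentropy with outputs clipped to (eps, 1 - eps)."""
    output = numpy.clip(output, eps, 1.0 - eps)
    return -(target * numpy.log(output) + (1 - target) * numpy.log(1 - output))
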
class Model:
    """
    @todo: input dimensions should be stored here! not as a global.
    """
    def __init__(self, input_dimension, hidden_dimension, learning_rate = 0.1, momentum = 0.9, weight_decay = 0.0002, random_seed = 666):
        self.input_dimension = input_dimension
        self.hidden_dimension = hidden_dimension
        self.learning_rate = learning_rate
        self.momentum = momentum
        self.weight_decay = weight_decay
        self.random_seed = random_seed

        random.seed(random_seed)

        self.parameters = parameters.Parameters(input_dimension=self.input_dimension, hidden_dimension=self.hidden_dimension, randomly_initialize=True, random_seed=self.random_seed)
        # Previous parameter updates, read by the momentum term in update().
        self.prev_dw = 0
        self.prev_db = 0
        self.prev_dc = 0

    def deterministic_reconstruction(self, v0):
        """
        One up-down cycle, but a mean-field approximation (no sampling).
        """
        q = sigmoid(self.parameters.b + dot(v0, self.parameters.w))
        p = sigmoid(self.parameters.c + dot(q, self.parameters.w.T))
        return p

    def deterministic_reconstruction_error(self, v0):
        """
        @note: According to Yoshua, -log P(V1 = v0 | tilde(h)(v0)).
        """
        return crossentropy(self.deterministic_reconstruction(v0), v0)

    def update(self, instances):
        """
        Update the L{Model} using a minibatch of training instances.
        @param instances: A list of dicts, each mapping feature index to (non-zero) value.
        @todo: Should assert that nonzero_indices and zero_indices
        are correct (i.e. are truly nonzero/zero).
        @todo: Multiply L{self.weight_decay} by L{self.learning_rate}, as done in Semantic Hashing?
        @todo: Decay the biases too?
        """
        minibatch = len(instances)
        v0 = pylearn.sparse_instance.to_vector(instances, self.input_dimension)
        print "old XENT per instance:", numpy.sum(self.deterministic_reconstruction_error(v0))/minibatch
        # One step of Gibbs sampling (CD-1): up to the hidden units, back down
        # to the visible units, and up again.
        q0 = sigmoid(self.parameters.b + dot(v0, self.parameters.w))
        h0 = sample(q0)
        p0 = sigmoid(self.parameters.c + dot(h0, self.parameters.w.T))
        v1 = sample(p0)
        q1 = sigmoid(self.parameters.b + dot(v1, self.parameters.w))

        dw = self.learning_rate * (dot(v0.T, h0) - dot(v1.T, q1)) / minibatch + self.momentum * self.prev_dw
        db = self.learning_rate * numpy.sum(h0 - q1, axis=0) / minibatch + self.momentum * self.prev_db
        dc = self.learning_rate * numpy.sum(v0 - v1, axis=0) / minibatch + self.momentum * self.prev_dc

        self.parameters.w *= (1 - self.weight_decay)

        self.parameters.w += dw
        self.parameters.b += db
        self.parameters.c += dc

        # Remember the updates so the momentum term sees them on the next call.
        self.prev_dw = dw
        self.prev_db = db
        self.prev_dc = dc

        print "new XENT per instance:", numpy.sum(self.deterministic_reconstruction_error(v0))/minibatch

#        print
#        print "v[0]:", v0
#        print "Q(h[0][i] = 1 | v[0]):", q0
#        print "h[0]:", h0
#        print "P(v[1][j] = 1 | h[0]):", p0
#        print "XENT(P(v[1][j] = 1 | h[0]) | v0):", numpy.sum(crossentropy(p0, v0))
#        print "v[1]:", v1
#        print "Q(h[1][i] = 1 | v[1]):", q1
#
#        print
#        print v0.T.shape
#        print h0.shape
#        print dot(v0.T, h0).shape
#        print self.parameters.w.shape
#        self.parameters.w += self.learning_rate * (dot(v0.T, h0) - dot(v1.T, q1)) / minibatch
#        print
#        print h0.shape
#        print q1.shape
#        print self.parameters.b.shape
#        self.parameters.b += self.learning_rate * numpy.sum(h0 - q1, axis=0) / minibatch
#        print v0.shape, v1.shape
#        print
#        print self.parameters.c.shape
#        self.parameters.c += self.learning_rate * numpy.sum(v0 - v1, axis=0) / minibatch
#        print self.parameters
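
# A hypothetical usage sketch, not part of the original module: it assumes
# that parameters.Parameters and pylearn.sparse_instance.to_vector behave as
# the calls above suggest, and that sparse instances are dicts mapping feature
# index to (non-zero) value, as described in Model.update(). The toy
# dimensions and data below are made up for illustration.
if __name__ == "__main__":
    model = Model(input_dimension=10, hidden_dimension=4)
    # Two sparse training instances: {feature index: value}.
    instances = [{0: 1.0, 3: 1.0}, {2: 1.0, 7: 1.0}]
    for epoch in range(5):
        model.update(instances)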