pylearn: annotate simple_autoassociator.py/model.py @ 386:a474341861fa
Added a simple AA
author   | Joseph Turian <turian@gmail.com>
date     | Tue, 08 Jul 2008 02:27:00 -0400
parents  | sparse_random_autoassociator/model.py@42cc94cf6c12
children | 98ca97cc9910
Line annotations (collapsed): the lines of this file were introduced by revisions 370:a1bbcde6b456 ("Moved sparse_random_autoassociator from my repository"), 372:75bab24bb2d8 ("Moved more logic into model.py"), and 386:a474341861fa ("Added a simple AA").

"""
The model for an autoassociator for sparse inputs, using Ronan Collobert + Jason
Weston's sampling trick (2008).
"""

from graph import trainfn
import parameters

import globals
from globals import LR

import numpy
import random
random.seed(globals.SEED)

class Model:
    def __init__(self):
        self.parameters = parameters.Parameters(randomly_initialize=True)

    def update(self, instance):
        """
        Update the L{Model} using one training instance.
        @param instance: A dict from feature index to (non-zero) value.
        @todo: Should assert that nonzero_indices and zero_indices
        are correct (i.e. are truly nonzero/zero).
        """
        x = numpy.zeros(globals.INPUT_DIMENSION)
        for idx in instance.keys():
            x[idx] = instance[idx]

        (y, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
        print
        print "instance:", instance
        print "OLD y:", y
        print "OLD total loss:", loss

        # SGD update
        self.parameters.w1 -= LR * gw1
        self.parameters.b1 -= LR * gb1
        self.parameters.w2 -= LR * gw2
        self.parameters.b2 -= LR * gb2

        # Recompute the loss, to make sure it's decreasing
        (y, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
        print "NEW y:", y
        print "NEW total loss:", loss
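
A minimal usage sketch (not part of the repository) showing how Model.update might be driven by a small training loop. The instance dictionaries below are made-up examples; globals.INPUT_DIMENSION, globals.SEED, globals.LR, parameters.Parameters, and graph.trainfn are assumed to be defined as in the sibling modules of this revision, and the import path "model" is only a guess at how this file would be imported.

    # Hypothetical driver for Model.update; assumes the sibling modules
    # (globals, parameters, graph) from this revision are importable.
    from model import Model

    model = Model()

    # Each instance is a dict from feature index to (non-zero) value,
    # as documented in Model.update.
    instances = [
        {0: 1.0, 3: 0.5},
        {1: 2.0, 2: -1.0},
    ]

    for instance in instances:
        # One SGD step per instance; update() prints the old and new loss.
        model.update(instance)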