comparison simple_autoassociator/model.py @ 389:ec8aadb6694d

Renamed simple AA directory
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 17:41:45 -0400
parents simple_autoassociator.py/model.py@98ca97cc9910
children e2cb8d489908
1 """
2 The model for an autoassociator for sparse inputs, using Ronan Collobert + Jason
3 Weston's sampling trick (2008).
4 """
5
6 from graph import trainfn
7 import parameters
8
9 import globals
10 from globals import LR
11
12 import numpy
13 import random
14 random.seed(globals.SEED)
15
16 class Model:
17 def __init__(self):
18 self.parameters = parameters.Parameters(randomly_initialize=True)
19
20 def update(self, instance):
21 """
22 Update the L{Model} using one training instance.
23 @param instance: A dict from feature index to (non-zero) value.
24 @todo: Should assert that nonzero_indices and zero_indices
25 are correct (i.e. are truly nonzero/zero).
26 """
27 x = numpy.zeros(globals.INPUT_DIMENSION)
28 for idx in instance.keys():
29 x[idx] = instance[idx]
30
31 (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
        print
        print "instance:", instance
        print "x:", x
        print "OLD y:", y
        print "OLD loss (unsummed):", loss_unsummed
        print "OLD total loss:", loss
        print "gw1:", gw1
        print "gb1:", gb1
        print "gw2:", gw2
        print "gb2:", gb2

        # SGD update
        self.parameters.w1 -= LR * gw1
        self.parameters.b1 -= LR * gb1
        self.parameters.w2 -= LR * gw2
        self.parameters.b2 -= LR * gb2

        # Recompute the loss, to make sure it's decreasing
        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
        print "NEW y:", y
        print "NEW loss (unsummed):", loss_unsummed
        print "NEW total loss:", loss
        print "NEW h:", h
        print "parameters:", self.parameters
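
Note: trainfn is imported from the graph module, which is not part of this changeset. For orientation only, below is a minimal numpy sketch of what such a training function could compute, assuming a one-hidden-layer autoassociator with sigmoid activations and squared-error reconstruction loss; the return tuple mirrors the (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2) unpacking above. The activation, loss, and weight-shape choices (w1: input x hidden, w2: hidden x input) are assumptions, not necessarily what graph.trainfn actually does.

import numpy

def sigmoid(a):
    return 1.0 / (1.0 + numpy.exp(-a))

def trainfn_sketch(x, w1, b1, w2, b2):
    # Forward pass: hidden representation and reconstruction (assumed sigmoid units).
    h = sigmoid(numpy.dot(x, w1) + b1)
    y = sigmoid(numpy.dot(h, w2) + b2)
    # Assumed squared-error reconstruction loss, per output dimension and summed.
    loss_unsummed = (y - x) ** 2
    loss = loss_unsummed.sum()
    # Backpropagate through the loss and both sigmoid layers.
    dy = 2.0 * (y - x) * y * (1.0 - y)
    gw2 = numpy.outer(h, dy)
    gb2 = dy
    dh = numpy.dot(w2, dy) * h * (1.0 - h)
    gw1 = numpy.outer(x, dh)
    gb1 = dh
    return y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2

With gradients of these shapes, the SGD step in Model.update (parameter -= LR * gradient) applies directly.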