comparison sandbox/simple_autoassociator/model.py @ 393:36baeb7125a4

Made sandbox directory
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 18:46:26 -0400
parents simple_autoassociator/model.py@e2cb8d489908
children 8cc11ac97087
comparison
equal deleted inserted replaced
392:e2cb8d489908 393:36baeb7125a4
1 """
2 The model for an autoassociator for sparse inputs, using Ronan Collobert + Jason
3 Weston's sampling trick (2008).
4 """
5
6 from graph import trainfn
7 import parameters
8
9 import globals
10 from globals import LR
11
12 import numpy
13 import random
# Seed the module-level RNG at import time so training runs are
# reproducible across invocations (seed comes from the project config).
random.seed(globals.SEED)
15
16 class Model:
17 def __init__(self):
18 self.parameters = parameters.Parameters(randomly_initialize=True)
19
20 def update(self, instance):
21 """
22 Update the L{Model} using one training instance.
23 @param instance: A dict from feature index to (non-zero) value.
24 @todo: Should assert that nonzero_indices and zero_indices
25 are correct (i.e. are truly nonzero/zero).
26 """
27 x = numpy.zeros(globals.INPUT_DIMENSION)
28 for idx in instance.keys():
29 x[idx] = instance[idx]
30
31 (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
32 print
33 print "instance:", instance
34 print "x:", x
35 print "OLD y:", y
36 print "OLD loss (unsummed):", loss_unsummed
37 print "gy:", gy
38 print "OLD total loss:", loss
39 print "gw1:", gw1
40 print "gb1:", gb1
41 print "gw2:", gw2
42 print "gb2:", gb2
43
44 # SGD update
45 self.parameters.w1 -= LR * gw1
46 self.parameters.b1 -= LR * gb1
47 self.parameters.w2 -= LR * gw2
48 self.parameters.b2 -= LR * gb2
49
50 # Recompute the loss, to make sure it's descreasing
51 (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
52 print "NEW y:", y
53 print "NEW loss (unsummed):", loss_unsummed
54 print "gy:", gy
55 print "NEW total loss:", loss
56 print "h:", h
57 print self.parameters