pylearn: changeset 404:8cc11ac97087
Debugging simple AA a bit
author      Joseph Turian <turian@gmail.com>
date        Thu, 10 Jul 2008 00:51:32 -0400
parents     273e5c03003e
children    be4209cd568f
files       sandbox/rbm/globals.py
            sandbox/simple_autoassociator/graph.py
            sandbox/simple_autoassociator/model.py
            sandbox/simple_autoassociator/parameters.py
diffstat    4 files changed, 9 insertions(+), 7 deletions(-)
--- a/sandbox/rbm/globals.py	Wed Jul 09 17:55:46 2008 -0400
+++ b/sandbox/rbm/globals.py	Thu Jul 10 00:51:32 2008 -0400
@@ -4,8 +4,8 @@
 INPUT_DIMENSION = 10
 #INPUT_DIMENSION = 100
-#HIDDEN_DIMENSION = 100
-HIDDEN_DIMENSION = 10
+HIDDEN_DIMENSION = 100
+#HIDDEN_DIMENSION = 10
 #HIDDEN_DIMENSION = 6
 LEARNING_RATE = 0.1
 LR = LEARNING_RATE
--- a/sandbox/simple_autoassociator/graph.py	Wed Jul 09 17:55:46 2008 -0400
+++ b/sandbox/simple_autoassociator/graph.py	Thu Jul 10 00:51:32 2008 -0400
@@ -17,10 +17,10 @@
 loss_unsummed = binary_crossentropy(y, x)
 loss = t.sum(loss_unsummed)
 
-(gw1, gb1, gw2, gb2, gy) = t.grad(loss, [w1, b1, w2, b2, y])
+(gw1, gb1, gw2, gb2, gy, gh) = t.grad(loss, [w1, b1, w2, b2, y, h])
 
 import theano.compile
 
 inputs = [x, w1, b1, w2, b2]
-outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy]
+outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy, gh]
 trainfn = theano.compile.function(inputs, outputs)
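The graph.py change above asks Theano's t.grad for one more derivative, gh = dloss/dh, and returns it from the compiled function. For reference, a minimal NumPy sketch of what that quantity is, assuming (the rest of graph.py is not shown in this hunk) a forward pass h = sigmoid(x.w1 + b1), y = sigmoid(h.w2 + b2) with element-wise binary cross-entropy against x:

    import numpy as np

    def sigmoid(a):
        return 1.0 / (1.0 + np.exp(-a))

    def forward_and_grads(x, w1, b1, w2, b2):
        # Assumed forward pass: one sigmoid hidden layer and a sigmoid
        # reconstruction of the input.
        h = sigmoid(np.dot(x, w1) + b1)
        y = sigmoid(np.dot(h, w2) + b2)
        loss_unsummed = -(x * np.log(y) + (1.0 - x) * np.log(1.0 - y))
        # Hand-derived gradients of the summed loss:
        gy = (y - x) / (y * (1.0 - y))   # d loss / d y
        gpre = y - x                     # sigmoid + cross-entropy cancel nicely
        gh = np.dot(gpre, w2.T)          # d loss / d h -- the value the patch adds
        return y, h, loss_unsummed.sum(), loss_unsummed, gy, gh

Printing gh alongside gy is a cheap way to see whether the gradient is still alive by the time it reaches the hidden layer.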
--- a/sandbox/simple_autoassociator/model.py	Wed Jul 09 17:55:46 2008 -0400
+++ b/sandbox/simple_autoassociator/model.py	Thu Jul 10 00:51:32 2008 -0400
@@ -28,13 +28,14 @@
         for idx in instance.keys():
             x[idx] = instance[idx]
 
-        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy, gh) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
         print
         print "instance:", instance
         print "x:", x
         print "OLD y:", y
         print "OLD loss (unsummed):", loss_unsummed
         print "gy:", gy
+        print "gh:", gh
         print "OLD total loss:", loss
         print "gw1:", gw1
         print "gb1:", gb1
@@ -48,7 +49,7 @@
         self.parameters.b2 -= LR * gb2
 
         # Recompute the loss, to make sure it's descreasing
-        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy, gh) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
         print "NEW y:", y
         print "NEW loss (unsummed):", loss_unsummed
         print "gy:", gy
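The model.py update follows the usual debug-by-printing loop: evaluate, step, re-evaluate. A standalone sketch of that pattern, where grads_fn and the params dict are hypothetical stand-ins for the compiled trainfn and the Parameters object:

    def sgd_debug_step(params, grads_fn, x, lr=0.1):
        # grads_fn is a hypothetical callable returning (loss, {name: gradient}).
        loss_before, g = grads_fn(params, x)
        for name in ("w1", "b1", "w2", "b2"):
            params[name] -= lr * g[name]      # plain gradient-descent update
        loss_after, _ = grads_fn(params, x)
        # With a small enough learning rate the loss should go down; if it
        # doesn't, the gradients (or their signs) are suspect.
        print("loss: %g -> %g" % (loss_before, loss_after))
        return loss_after < loss_before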
--- a/sandbox/simple_autoassociator/parameters.py	Wed Jul 09 17:55:46 2008 -0400
+++ b/sandbox/simple_autoassociator/parameters.py	Thu Jul 10 00:51:32 2008 -0400
@@ -20,7 +20,8 @@
             self.w1 = (numpy.random.rand(input_dimension, hidden_dimension)-0.5)/input_dimension
             self.w2 = (numpy.random.rand(hidden_dimension, input_dimension)-0.5)/hidden_dimension
             self.b1 = numpy.zeros(hidden_dimension)
-            self.b2 = numpy.zeros(input_dimension)
+            #self.b2 = numpy.zeros(input_dimension)
+            self.b2 = numpy.array([10, 0, 0, -10])
         else:
             self.w1 = numpy.zeros((input_dimension, hidden_dimension))
             self.w2 = numpy.zeros((hidden_dimension, input_dimension))
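The hard-coded b2 reads as a debugging probe rather than a real initialization: it only makes sense when input_dimension == 4, and since w2 starts near zero the initial reconstruction is approximately sigmoid(b2), pinning two output units near 1 and 0 regardless of the input. A quick check of that reading (the interpretation is mine, not stated in the changeset):

    import numpy as np

    b2 = np.array([10.0, 0.0, 0.0, -10.0])
    print(1.0 / (1.0 + np.exp(-b2)))
    # -> approximately [1.0, 0.5, 0.5, 0.0]: units 0 and 3 start saturated,
    #    so their gradients through the output sigmoid are near zero.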