changeset 392:e2cb8d489908
More debugging
| author | Joseph Turian <turian@gmail.com> |
|---|---|
| date | Tue, 08 Jul 2008 18:45:35 -0400 |
| parents | ec8aadb6694d |
| children | 36baeb7125a4 |
| files | simple_autoassociator/globals.py, simple_autoassociator/graph.py, simple_autoassociator/main.py, simple_autoassociator/model.py |
| diffstat | 4 files changed, 18 insertions(+), 13 deletions(-) |
--- a/simple_autoassociator/globals.py	Tue Jul 08 17:41:45 2008 -0400
+++ b/simple_autoassociator/globals.py	Tue Jul 08 18:45:35 2008 -0400
@@ -4,9 +4,9 @@
 #INPUT_DIMENSION = 1000
 #INPUT_DIMENSION = 100
-INPUT_DIMENSION = 10
-#HIDDEN_DIMENSION = 20
-HIDDEN_DIMENSION = 4
-LEARNING_RATE = 0.01
+INPUT_DIMENSION = 4
+HIDDEN_DIMENSION = 10
+#HIDDEN_DIMENSION = 4
+LEARNING_RATE = 0.1
 LR = LEARNING_RATE
 SEED = 666
 
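The new settings flip the architecture: the old configuration compressed a 10-dimensional input into a 4-unit hidden layer, while the new one expands a 4-dimensional input into a 10-unit hidden layer, an overcomplete code, and the learning rate goes up tenfold. A quick sketch of what that means for capacity, assuming the usual x -> h -> x layout (the actual shapes are set up in model.py):

```python
# Parameter count implied by the new globals, assuming the usual
# x (4,) -> h (10,) -> y (4,) autoassociator layout (the real shapes
# are set up in model.py).
INPUT_DIMENSION = 4
HIDDEN_DIMENSION = 10

n_params = (INPUT_DIMENSION * HIDDEN_DIMENSION + HIDDEN_DIMENSION    # w1, b1
            + HIDDEN_DIMENSION * INPUT_DIMENSION + INPUT_DIMENSION)  # w2, b2
print "parameters:", n_params  # 94 parameters to reconstruct a 4-dimensional input
```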
--- a/simple_autoassociator/graph.py	Tue Jul 08 17:41:45 2008 -0400
+++ b/simple_autoassociator/graph.py	Tue Jul 08 18:45:35 2008 -0400
@@ -17,10 +17,10 @@
 loss_unsummed = binary_crossentropy(y, x)
 loss = t.sum(loss_unsummed)
 
-(gw1, gb1, gw2, gb2) = t.grad(loss, [w1, b1, w2, b2])
+(gw1, gb1, gw2, gb2, gy) = t.grad(loss, [w1, b1, w2, b2, y])
 
 import theano.compile
 
 inputs = [x, w1, b1, w2, b2]
-outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2]
+outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy]
 trainfn = theano.compile.function(inputs, outputs)
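The point of this change is to also ask t.grad for gy, the gradient of the loss with respect to the reconstruction y, so it can be printed while debugging. With binary_crossentropy(y, x) taking y as the output and x as the target, the summed loss is L = -sum_i [x_i log y_i + (1 - x_i) log(1 - y_i)], which gives dL/dy_i = (y_i - x_i) / (y_i (1 - y_i)). A small numpy sanity check of that formula (graph.py computes the same thing symbolically):

```python
import numpy

# Summed binary cross-entropy, with y the reconstruction and x the target,
# matching the binary_crossentropy(y, x) call in graph.py.
def loss(y, x):
    return -numpy.sum(x * numpy.log(y) + (1 - x) * numpy.log(1 - y))

x = numpy.array([1.0, 0.0, 1.0, 0.0])
y = numpy.array([0.8, 0.3, 0.6, 0.1])

# Analytic gradient: dL/dy_i = (y_i - x_i) / (y_i * (1 - y_i))
gy = (y - x) / (y * (1 - y))

# Central-difference check of each component
eps = 1e-6
for i in range(len(y)):
    yp = y.copy(); yp[i] += eps
    ym = y.copy(); ym[i] -= eps
    print gy[i], (loss(yp, x) - loss(ym, x)) / (2 * eps)  # should match
```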
--- a/simple_autoassociator/main.py	Tue Jul 08 17:41:45 2008 -0400
+++ b/simple_autoassociator/main.py	Tue Jul 08 18:45:35 2008 -0400
@@ -16,9 +16,12 @@
 import numpy
 
 nonzero_instances = []
-nonzero_instances.append({1: 0.1, 5: 0.5, 9: 1})
-nonzero_instances.append({2: 0.3, 5: 0.5, 8: 0.8})
-#nonzero_instances.append({1: 0.2, 2: 0.3, 5: 0.5})
+nonzero_instances.append({0: 1, 1: 1})
+nonzero_instances.append({0: 1, 2: 1})
+
+#nonzero_instances.append({1: 0.1, 5: 0.5, 9: 1})
+#nonzero_instances.append({2: 0.3, 5: 0.5, 8: 0.8})
+##nonzero_instances.append({1: 0.2, 2: 0.3, 5: 0.5})
 
 import model
 model = model.Model()
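Each training instance is a sparse {index: value} dict over the input dimensions; the new instances are binary patterns that fit the new INPUT_DIMENSION = 4. model.py expands them to dense vectors before calling trainfn; densify below is a hypothetical helper name wrapping the exact loop from model.py:

```python
import numpy

INPUT_DIMENSION = 4  # the new value from globals.py

def densify(instance):
    # Same loop model.py uses to expand a sparse dict into a dense vector.
    x = numpy.zeros(INPUT_DIMENSION)
    for idx in instance.keys():
        x[idx] = instance[idx]
    return x

print densify({0: 1, 1: 1})  # [ 1.  1.  0.  0.]
print densify({0: 1, 2: 1})  # [ 1.  0.  1.  0.]
```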
--- a/simple_autoassociator/model.py	Tue Jul 08 17:41:45 2008 -0400
+++ b/simple_autoassociator/model.py	Tue Jul 08 18:45:35 2008 -0400
@@ -28,12 +28,13 @@
         for idx in instance.keys():
             x[idx] = instance[idx]
 
-        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
         print
         print "instance:", instance
         print "x:", x
         print "OLD y:", y
-        print "NEW loss (unsummed):", loss_unsummed
+        print "OLD loss (unsummed):", loss_unsummed
+        print "gy:", gy
         print "OLD total loss:", loss
         print "gw1:", gw1
         print "gb1:", gb1
@@ -47,9 +48,10 @@
         self.parameters.b2 -= LR * gb2
 
         # Recompute the loss, to make sure it's decreasing
-        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
         print "NEW y:", y
         print "NEW loss (unsummed):", loss_unsummed
+        print "gy:", gy
         print "NEW total loss:", loss
-        print h
+        print "h:", h
         print self.parameters
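Stripped of the debugging prints, the whole step in model.py is one forward/backward pass through trainfn, an SGD update p -= LR * g on each parameter, and a second call to confirm the total loss went down. A self-contained numpy sketch of that loop (the sigmoid layers and weight shapes are assumptions; the real graph is whatever graph.py builds):

```python
import numpy

# One training step in plain numpy, mirroring model.py's update and re-check.
rng = numpy.random.RandomState(666)          # SEED from globals.py
LR = 0.1                                     # LEARNING_RATE from globals.py
w1 = rng.uniform(-0.1, 0.1, (4, 10)); b1 = numpy.zeros(10)
w2 = rng.uniform(-0.1, 0.1, (10, 4)); b2 = numpy.zeros(4)

def sigmoid(a):
    return 1.0 / (1.0 + numpy.exp(-a))

def forward(x):
    h = sigmoid(numpy.dot(x, w1) + b1)
    y = sigmoid(numpy.dot(h, w2) + b2)
    return h, y, -numpy.sum(x * numpy.log(y) + (1 - x) * numpy.log(1 - y))

x = numpy.array([1.0, 1.0, 0.0, 0.0])
h, y, old_loss = forward(x)

# Backprop: for a sigmoid output with binary cross-entropy, dL/da2 = y - x.
ga2 = y - x
gw2 = numpy.outer(h, ga2); gb2 = ga2
ga1 = numpy.dot(w2, ga2) * h * (1 - h)
gw1 = numpy.outer(x, ga1); gb1 = ga1

for p, g in ((w1, gw1), (b1, gb1), (w2, gw2), (b2, gb2)):
    p -= LR * g                              # the SGD step from model.py

print "OLD total loss:", old_loss
print "NEW total loss:", forward(x)[2]      # should have gone down
```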