# HG changeset patch
# User Joseph Turian
# Date 1215803624 14400
# Node ID faffaae0d2f9f210b9aa83b273afaacdaab8003f
# Parent  3cd4cfda259905db93ff474b127f69b80344ed35
Autoassociator now seems to work

diff -r 3cd4cfda2599 -r faffaae0d2f9 sandbox/simple_autoassociator/globals.py
--- a/sandbox/simple_autoassociator/globals.py	Fri Jul 11 01:33:27 2008 -0400
+++ b/sandbox/simple_autoassociator/globals.py	Fri Jul 11 15:13:44 2008 -0400
@@ -5,8 +5,8 @@
 #INPUT_DIMENSION = 1000
 #INPUT_DIMENSION = 100
 INPUT_DIMENSION = 4
-HIDDEN_DIMENSION = 10
-#HIDDEN_DIMENSION = 4
+#HIDDEN_DIMENSION = 10
+HIDDEN_DIMENSION = 1
 LEARNING_RATE = 0.1
 LR = LEARNING_RATE
 SEED = 666
diff -r 3cd4cfda2599 -r faffaae0d2f9 sandbox/simple_autoassociator/graph.py
--- a/sandbox/simple_autoassociator/graph.py	Fri Jul 11 01:33:27 2008 -0400
+++ b/sandbox/simple_autoassociator/graph.py	Fri Jul 11 15:13:44 2008 -0400
@@ -17,10 +17,10 @@
 loss_unsummed = binary_crossentropy(y, x)
 loss = t.sum(loss_unsummed)
 
-(gw1, gb1, gw2, gb2, gy, gh) = t.grad(loss, [w1, b1, w2, b2, y, h])
+(gw1, gb1, gw2, gb2) = t.grad(loss, [w1, b1, w2, b2])
 
 import theano.compile
 
 inputs = [x, w1, b1, w2, b2]
-outputs = [y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy, gh]
+outputs = [y, h, loss, gw1, gb1, gw2, gb2]
 trainfn = theano.compile.function(inputs, outputs)
diff -r 3cd4cfda2599 -r faffaae0d2f9 sandbox/simple_autoassociator/model.py
--- a/sandbox/simple_autoassociator/model.py	Fri Jul 11 01:33:27 2008 -0400
+++ b/sandbox/simple_autoassociator/model.py	Fri Jul 11 15:13:44 2008 -0400
@@ -28,19 +28,16 @@
         for idx in instance.keys():
             x[idx] = instance[idx]
 
-        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy, gh) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
-        print
-        print "instance:", instance
-        print "x:", x
-        print "OLD y:", y
-        print "OLD loss (unsummed):", loss_unsummed
-        print "gy:", gy
-        print "gh:", gh
+        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+#        print
+#        print "instance:", instance
+#        print "x:", x
+#        print "OLD y:", y
         print "OLD total loss:", loss
-        print "gw1:", gw1
-        print "gb1:", gb1
-        print "gw2:", gw2
-        print "gb2:", gb2
+#        print "gw1:", gw1
+#        print "gb1:", gb1
+#        print "gw2:", gw2
+#        print "gb2:", gb2
 
         # SGD update
         self.parameters.w1 -= LR * gw1
@@ -49,10 +46,8 @@
         self.parameters.b2 -= LR * gb2
 
         # Recompute the loss, to make sure it's descreasing
-        (y, h, loss, loss_unsummed, gw1, gb1, gw2, gb2, gy, gh) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
-        print "NEW y:", y
-        print "NEW loss (unsummed):", loss_unsummed
-        print "gy:", gy
+        (y, h, loss, gw1, gb1, gw2, gb2) = trainfn(x, self.parameters.w1, self.parameters.b1, self.parameters.w2, self.parameters.b2)
+#        print "NEW y:", y
         print "NEW total loss:", loss
-        print "h:", h
-        print self.parameters
+#        print "h:", h
+#        print self.parameters
diff -r 3cd4cfda2599 -r faffaae0d2f9 sandbox/simple_autoassociator/parameters.py
--- a/sandbox/simple_autoassociator/parameters.py	Fri Jul 11 01:33:27 2008 -0400
+++ b/sandbox/simple_autoassociator/parameters.py	Fri Jul 11 15:13:44 2008 -0400
@@ -20,8 +20,8 @@
             self.w1 = (numpy.random.rand(input_dimension, hidden_dimension)-0.5)/input_dimension
             self.w2 = (numpy.random.rand(hidden_dimension, input_dimension)-0.5)/hidden_dimension
             self.b1 = numpy.zeros(hidden_dimension)
-            #self.b2 = numpy.zeros(input_dimension)
-            self.b2 = numpy.array([10, 0, 0, -10])
+            self.b2 = numpy.zeros(input_dimension)
+            #self.b2 = numpy.array([10, 0, 0, -10])
         else:
            self.w1 = numpy.zeros((input_dimension, hidden_dimension))
            self.w2 = numpy.zeros((hidden_dimension, input_dimension))
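
For reference, a minimal pure-NumPy sketch of the training step this changeset leaves in place: gradients with respect to the parameters only (w1, b1, w2, b2), as in the new t.grad call, followed by the SGD update from model.py. The forward pass is not visible in the diff, so the sketch assumes the usual sigmoid autoassociator, h = sigmoid(x.w1 + b1) and y = sigmoid(h.w2 + b2), with binary cross-entropy loss; the example input x is made up for illustration.

# Illustrative sketch only; the forward pass (h, y) is an assumption,
# since graph.py's definitions of h and y are outside this diff.
import numpy

INPUT_DIMENSION = 4
HIDDEN_DIMENSION = 1   # as set in globals.py by this changeset
LR = 0.1               # LEARNING_RATE from globals.py

def sigmoid(z):
    return 1.0 / (1.0 + numpy.exp(-z))

rng = numpy.random.RandomState(666)   # SEED from globals.py
w1 = (rng.rand(INPUT_DIMENSION, HIDDEN_DIMENSION) - 0.5) / INPUT_DIMENSION
w2 = (rng.rand(HIDDEN_DIMENSION, INPUT_DIMENSION) - 0.5) / HIDDEN_DIMENSION
b1 = numpy.zeros(HIDDEN_DIMENSION)
b2 = numpy.zeros(INPUT_DIMENSION)     # zeros again, per parameters.py above

x = numpy.array([1.0, 0.0, 0.0, 1.0])  # hypothetical training instance

# Forward pass (assumed) and binary cross-entropy loss.
h = sigmoid(numpy.dot(x, w1) + b1)
y = sigmoid(numpy.dot(h, w2) + b2)
loss = -numpy.sum(x * numpy.log(y) + (1 - x) * numpy.log(1 - y))
print "OLD total loss:", loss

# Backward pass: gradients w.r.t. the parameters only, mirroring
# t.grad(loss, [w1, b1, w2, b2]) in graph.py.
d2 = y - x                            # dloss/d(pre-sigmoid output)
gw2 = numpy.outer(h, d2)
gb2 = d2
d1 = numpy.dot(w2, d2) * h * (1.0 - h)
gw1 = numpy.outer(x, d1)
gb1 = d1

# SGD update, as in model.py.
w1 -= LR * gw1
b1 -= LR * gb1
w2 -= LR * gw2
b2 -= LR * gb2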