# HG changeset patch
# User Joseph Turian
# Date 1215564536 14400
# Node ID 6e55ccb7e2bfabbb3b11c47c053e51187d1922de
# Parent  25a3212287cdedaa3a3126f9b207eacecf1f8ce0
Better output

diff -r 25a3212287cd -r 6e55ccb7e2bf sandbox/rbm/globals.py
--- a/sandbox/rbm/globals.py	Tue Jul 08 20:39:36 2008 -0400
+++ b/sandbox/rbm/globals.py	Tue Jul 08 20:48:56 2008 -0400
@@ -2,11 +2,10 @@
 Global variables.
 """
 
-#INPUT_DIMENSION = 1000
+INPUT_DIMENSION = 1000
 #INPUT_DIMENSION = 100
-INPUT_DIMENSION = 4
-#HIDDEN_DIMENSION = 10
-HIDDEN_DIMENSION = 6
+HIDDEN_DIMENSION = 20
+#HIDDEN_DIMENSION = 6
 LEARNING_RATE = 0.1
 LR = LEARNING_RATE
 SEED = 666
diff -r 25a3212287cd -r 6e55ccb7e2bf sandbox/rbm/model.py
--- a/sandbox/rbm/model.py	Tue Jul 08 20:39:36 2008 -0400
+++ b/sandbox/rbm/model.py	Tue Jul 08 20:48:56 2008 -0400
@@ -30,6 +30,15 @@
         else:
             x[j][i] = 0
     return x
+def crossentropy(output, target):
+    """
+    Compute the crossentropy of binary output wrt binary target.
+    @note: We do not sum, crossentropy is computed by component.
+    @todo: Rewrite as a scalar, and then broadcast to tensor.
+    """
+    return -(target * numpy.log(output) + (1 - target) * numpy.log(1 - output))
+
+
 class Model:
     def __init__(self):
         self.parameters = parameters.Parameters(randomly_initialize=True)
@@ -55,11 +64,11 @@
         print "Q(h[0][i] = 1 | v[0]):", q0
         print "h[0]:", h0
         print "P(v[1][j] = 1 | h[0]):", p0
+        print "XENT(P(v[1][j] = 1 | h[0]) | v0):", numpy.sum(crossentropy(p0, v0))
         print "v[1]:", v1
         print "Q(h[1][i] = 1 | v[1]):", q1
-        print self.parameters.w.shape
         self.parameters.w += LR * (dot(v0.T, h0) - dot(v1.T, q1))
         self.parameters.b += LR * (h0 - q1)
         self.parameters.c += LR * (v0 - v1)
-        print self.parameters
+#        print self.parameters
 