diff sparse_random_autoassociator/graph.py @ 370:a1bbcde6b456

Moved sparse_random_autoassociator from my repository
author Joseph Turian <turian@gmail.com>
date Mon, 07 Jul 2008 01:54:46 -0400
parents
children e4473d9697d7
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sparse_random_autoassociator/graph.py	Mon Jul 07 01:54:46 2008 -0400
@@ -0,0 +1,67 @@
+"""
+Theano graph for an autoassociator for sparse inputs, which will be trained
+using Ronan Collobert and Jason Weston's sampling trick (2008).
+@todo: Make nearly everything private.
+"""
+
+from globals import MARGIN
+
+import theano.compile
+from pylearn.nnet_ops import sigmoid
+from theano import tensor as t
+from theano.tensor import dot
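+
+# Symbolic variables. Following the sampling trick, only the nonzero
+# dimensions of the sparse input (and the corresponding weights), plus a
+# sample of the zero dimensions, appear in the graph.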
+xnonzero    = t.dvector()
+w1nonzero   = t.dmatrix()
+b1          = t.dvector()
+w2nonzero   = t.dmatrix()
+w2zero      = t.dmatrix()
+b2nonzero   = t.dvector()
+b2zero      = t.dvector()
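+
+# Forward pass: one hidden layer, with separate sigmoid reconstructions
+# for the nonzero dimensions and the sampled zero dimensions.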
+h           = sigmoid(dot(xnonzero, w1nonzero) + b1)
+ynonzero    = sigmoid(dot(h, w2nonzero) + b2nonzero)
+yzero       = sigmoid(dot(h, w2zero) + b2zero)
+
+# May want to weight the loss wrt the nonzero value?  e.g. a MARGIN
+# violation for a nonzero value of 0.1 is not as bad as a MARGIN
+# violation for a nonzero value of 0.2.
+def hingeloss(margin_violation):
+    # max(0, -margin_violation): zero cost when the margin is satisfied
+    # (margin_violation >= 0), linear in the violation otherwise.
+    return -margin_violation * (margin_violation < 0)
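+# nonzeroloss: every nonzero output should exceed the largest zero output
+# by at least MARGIN.  zeroloss: every zero output should stay at least
+# MARGIN below the smallest nonzero output (-t.max(-y) is t.min(y)).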
+nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN)
+zeroloss = hingeloss(-t.max(-(ynonzero)) - yzero - MARGIN)
+# xnonzero sensitive loss:
+#nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN - xnonzero)
+#zeroloss = hingeloss(-t.max(-(ynonzero - xnonzero)) - yzero - MARGIN)
+loss = t.sum(nonzeroloss) + t.sum(zeroloss)
+
+(gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = \
+        t.grad(loss, [w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero])
+
+inputs  = [xnonzero, w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero]
+outputs = [ynonzero, yzero, loss, gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero]
+trainfn = theano.compile.function(inputs, outputs)
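+
+# Hypothetical usage sketch (names and shapes are assumptions, not part of
+# the original code): with nnz nonzero input dimensions, nz sampled zero
+# dimensions, and nhid hidden units, one gradient step looks like
+#   (ynz, yz, cost, gw1, gb1, gw2n, gw2z, gb2n, gb2z) = trainfn(
+#       x,    # (nnz,)       values of the nonzero input dimensions
+#       w1,   # (nnz, nhid)  first-layer weights for those dimensions
+#       b1,   # (nhid,)      hidden biases
+#       w2n,  # (nhid, nnz)  output weights for the nonzero dimensions
+#       w2z,  # (nhid, nz)   output weights for the sampled zero dimensions
+#       b2n,  # (nnz,)       output biases, nonzero dimensions
+#       b2z)  # (nz,)        output biases, sampled zero dimensions
+# followed by a manual update of each parameter against its gradient.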