sparse_random_autoassociator/graph.py @ 382:b4efd192d880

Moved xent loss to nnet_ops
author Joseph Turian <turian@gmail.com>
date Tue, 08 Jul 2008 01:58:16 -0400

"""
Theano graph for an autoassociator for sparse inputs, which will be trained
using Ronan Collobert + Jason Weston's sampling trick (2008).
@todo: Make nearly everything private.
"""

from globals import MARGIN

from pylearn.nnet_ops import sigmoid, crossentropy_softmax_1hot
from theano import tensor as t
from theano.tensor import dot
import theano.compile
# (crossentropy_softmax_1hot is currently unused; cf. the commented-out
# cross-entropy loss below.)
xnonzero    = t.dvector()   # nonzero values of one sparse input example
w1nonzero   = t.dmatrix()   # input->hidden weights for the nonzero dimensions
b1          = t.dvector()   # hidden bias
w2nonzero   = t.dmatrix()   # hidden->output weights for the nonzero dimensions
w2zero      = t.dmatrix()   # hidden->output weights for the sampled zero dimensions
b2nonzero   = t.dvector()   # output bias for the nonzero dimensions
b2zero      = t.dvector()   # output bias for the sampled zero dimensions
h           = sigmoid(dot(xnonzero, w1nonzero) + b1)    # hidden representation
ynonzero    = sigmoid(dot(h, w2nonzero) + b2nonzero)    # reconstructed nonzero dimensions
yzero       = sigmoid(dot(h, w2zero) + b2zero)          # reconstructed sampled zero dimensions

# We may want to weight the loss w.r.t. the nonzero value, e.g. a MARGIN
# violation against a 0.1 nonzero is not as bad as a MARGIN violation
# against a 0.2 nonzero.
def hingeloss(margin):
    # Linear hinge: no penalty when the margin is satisfied (margin >= 0),
    # a penalty of -margin when it is violated (margin < 0).
    return -margin * (margin < 0)
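# For intuition, the same expression applied to plain numbers (illustrative
# values, not from the graph):
#   hingeloss(0.3)  -> 0.0   (margin satisfied: no penalty)
#   hingeloss(-0.2) -> 0.2   (margin violated by 0.2: linear penalty)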
# Each nonzero output should exceed even the largest zero output by MARGIN:
nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN)
# ... and each zero output should fall below even the smallest nonzero
# output by MARGIN (note that -t.max(-v) is min(v)):
zeroloss = hingeloss(-t.max(-ynonzero) - yzero - MARGIN)
# xnonzero-sensitive variants of the same losses:
#nonzeroloss = hingeloss(ynonzero - t.max(yzero) - MARGIN - xnonzero)
#zeroloss = hingeloss(-t.max(-(ynonzero - xnonzero)) - yzero - MARGIN)
# Total loss for this example: the sum of all margin violations.
loss = t.sum(nonzeroloss) + t.sum(zeroloss)
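# A worked example with hypothetical numbers: take MARGIN = 0.1,
# ynonzero = [0.7, 0.5], yzero = [0.3, 0.2].  Then
#   nonzeroloss = hingeloss([0.7, 0.5] - 0.3 - 0.1) = hingeloss([0.3, 0.1]) = [0, 0]
#   zeroloss    = hingeloss(0.5 - [0.3, 0.2] - 0.1) = hingeloss([0.1, 0.2]) = [0, 0]
# so both margins are already satisfied and the example contributes zero loss.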

# Alternative cross-entropy reconstruction loss (binary_crossentropy is not
# imported above, so this stays commented out):
#loss = t.sum(binary_crossentropy(ynonzero, xnonzero)) + t.sum(binary_crossentropy(yzero, t.constant(0)))

(gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero) = \
        t.grad(loss, [w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero])

# Compile one function that maps the inputs to the reconstructions, the loss,
# and all of the gradients in a single pass.
inputs  = [xnonzero, w1nonzero, b1, w2nonzero, w2zero, b2nonzero, b2zero]
outputs = [ynonzero, yzero, loss, gw1nonzero, gb1, gw2nonzero, gw2zero, gb2nonzero, gb2zero]
trainfn = theano.compile.function(inputs, outputs)
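
# A minimal usage sketch (not part of the original module).  The dimensions,
# the learning rate, and the plain-SGD step are illustrative assumptions, and
# it presumes the theano.compile.function call above succeeds with this era's
# Theano API.
if __name__ == '__main__':
    import numpy
    rng = numpy.random.RandomState(0)
    nonzero_dim, hidden_dim, zero_sample_dim = 5, 4, 3   # hypothetical sizes
    lr = 0.01                                            # hypothetical learning rate

    x   = rng.rand(nonzero_dim)         # nonzero values of one sparse example
    W1  = rng.uniform(-1, 1, (nonzero_dim, hidden_dim))
    B1  = numpy.zeros(hidden_dim)
    W2n = rng.uniform(-1, 1, (hidden_dim, nonzero_dim))
    W2z = rng.uniform(-1, 1, (hidden_dim, zero_sample_dim))
    B2n = numpy.zeros(nonzero_dim)
    B2z = numpy.zeros(zero_sample_dim)

    yn, yz, l, gW1, gB1, gW2n, gW2z, gB2n, gB2z = \
            trainfn(x, W1, B1, W2n, W2z, B2n, B2z)
    print 'loss before update:', l

    # One plain SGD step on the parameter slices seen by this example; in the
    # full model these slices would presumably be scattered back into the
    # global parameter matrices.
    for param, grad in [(W1, gW1), (B1, gB1), (W2n, gW2n),
                        (W2z, gW2z), (B2n, gB2n), (B2z, gB2z)]:
        param -= lr * grad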