# _test_xlogx.py
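"""Unit tests for the xlogx op: elementwise x * log(x), with xlogx(0)
taken to be 0 (the limit of x * log(x) as x -> 0), as the tests below
assume."""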

import unittest

import numpy
import numpy.random

from theano import compile
from theano.tensor import as_tensor
import theano._test_tensor as TT

from xlogx import xlogx
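
# For reference, a minimal NumPy sketch of what the xlogx op is expected to
# compute (an assumption based on the tests below: elementwise x * log(x),
# with the value at x == 0 defined as 0). The helper _numpy_xlogx is not
# part of the original test; it is illustrative only.
def _numpy_xlogx(x):
    x = numpy.asarray(x, dtype='float64')
    out = numpy.zeros_like(x)        # entries where x == 0 stay 0
    nonzero = x != 0
    out[nonzero] = x[nonzero] * numpy.log(x[nonzero])
    return out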

class T_XlogX(unittest.TestCase):
    def test0(self):
        # xlogx(1) == 1 * log(1) == 0, and xlogx(0) is defined as 0,
        # so both entries of the output should be exactly 0.
        x = as_tensor([1, 0])
        y = xlogx(x)
        y = compile.eval_outputs([y])
        self.assertTrue(numpy.all(y == numpy.asarray([0., 0.])))
    def test1(self):
        # Check the symbolic gradient of xlogx against finite differences.
        # verify_grad expects an op-like object with a make_node method;
        # the slice [:, 2] also exercises the gradient through a subtensor.
        class Dummy(object):
            def make_node(self, a):
                return [xlogx(a)[:, 2]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3, 4)])
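
# A hedged sketch of the finite-difference idea behind verify_grad: the
# analytic derivative of x * log(x) is log(x) + 1, and a central difference
# of _numpy_xlogx should match it at points away from 0. This helper and
# its tolerances are assumptions for illustration, not part of the test.
def _check_xlogx_grad(x, eps=1e-6):
    numeric = (_numpy_xlogx(x + eps) - _numpy_xlogx(x - eps)) / (2 * eps)
    analytic = numpy.log(x) + 1
    assert numpy.allclose(numeric, analytic, atol=1e-4)

# Example: _check_xlogx_grad(numpy.random.rand(3, 4) + 0.1)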


if __name__ == '__main__':
    unittest.main()