view _test_onehotop.py @ 451:d99fefbc9324

Added a KL-divergence.
author Joseph Turian <turian@gmail.com>
date Thu, 04 Sep 2008 14:46:30 -0400

from onehotop import one_hot

import unittest

import numpy

from theano import compile
from theano.tensor import as_tensor

class T_OneHot(unittest.TestCase):
    def test0(self):
        # one_hot(x, n) should map each index in x to a length-n row
        # containing a single 1 at that index.
        x = as_tensor([3, 2, 1])
        n = as_tensor(5)
        o = one_hot(x, n)
        result = compile.eval_outputs([o])
        expected = numpy.asarray([[0, 0, 0, 1, 0],
                                  [0, 0, 1, 0, 0],
                                  [0, 1, 0, 0, 0]])
        self.assertTrue(numpy.all(result == expected))
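    def test_against_numpy_reference(self):
        # A hypothetical extra check, not in the original file: a minimal
        # sketch that builds the expected one-hot matrix with plain NumPy
        # fancy indexing and compares it to the op's output. It assumes
        # one_hot(x, n) returns a matrix whose row i has a 1 at column x[i].
        idx = [3, 2, 1]
        n = 5
        expected = numpy.zeros((len(idx), n))
        expected[numpy.arange(len(idx)), idx] = 1
        o = one_hot(as_tensor(idx), as_tensor(n))
        result = compile.eval_outputs([o])
        self.assertTrue(numpy.all(result == expected))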

if __name__ == '__main__':
    unittest.main()