_test_onehotop.py @ 382:b4efd192d880
Moved xent loss to nnet_ups
Joseph Turian <turian@gmail.com>, Tue, 08 Jul 2008 01:58:16 -0400

import unittest

import numpy

from theano import compile
from theano.tensor import as_tensor

from onehotop import one_hot

class T_OneHot(unittest.TestCase):
    def test0(self):
        # one_hot(x, y) builds a matrix with one row per entry of x and y
        # columns; row i has a single 1 in column x[i] and 0 everywhere else.
        x = as_tensor([3, 2, 1])   # column index of the 1 in each row
        y = as_tensor(5)           # number of columns
        o = one_hot(x, y)
        result = compile.eval_outputs([o])
        expected = numpy.asarray([[0, 0, 0, 1, 0],
                                  [0, 0, 1, 0, 0],
                                  [0, 1, 0, 0, 0]])
        self.failUnless(numpy.all(result == expected))
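
# Sketch not present in the original file: a plain-NumPy reference for what
# one_hot(x, y) is assumed to compute (x gives the column index of the single
# 1 in each row, y the number of columns). The helper name numpy_one_hot and
# the extra test case are hypothetical additions for cross-checking the
# expected matrix in test0 without going through theano.
def numpy_one_hot(indices, width):
    """Return a len(indices) x width matrix with a single 1 per row."""
    out = numpy.zeros((len(indices), width))
    out[numpy.arange(len(indices)), indices] = 1
    return out

class T_OneHotReference(unittest.TestCase):
    def test_reference_matches_expected(self):
        # numpy_one_hot([3, 2, 1], 5) should reproduce the matrix checked in test0.
        expected = numpy.asarray([[0, 0, 0, 1, 0],
                                  [0, 0, 1, 0, 0],
                                  [0, 1, 0, 0, 0]])
        self.failUnless(numpy.all(numpy_one_hot([3, 2, 1], 5) == expected))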

if __name__ == '__main__':
    unittest.main()