view algorithms/_test_logistic_regression.py @ 485:e8c37244b54f

Small bugfix in regularization
author Joseph Turian <turian@gmail.com>
date Tue, 28 Oct 2008 01:37:32 -0400
parents bd937e845bbb
children c7ce66b4e8f4

from logistic_regression import *
import sys, time
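# NOTE: the star import above is expected to also provide theano, numpy (as N),
# nnet_ops, pprint, and printing, all of which are used below.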

if __name__ == '__main__':
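    # Register short display names for the softmax cross-entropy ops so the
    # pretty-printed graphs below are easier to read.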
    pprint.assign(nnet_ops.crossentropy_softmax_1hot_with_bias_dx, printing.FunctionPrinter('xsoftmaxdx'))
    pprint.assign(nnet_ops.crossentropy_softmax_argmax_1hot_with_bias, printing.FunctionPrinter('nll', 'softmax', 'argmax'))
    if 1:
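        # Exercise the multi-class logistic regression module (Module_Nclass).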
        lrc = Module_Nclass()

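        # Pretty-print the update graph, first under the default mode and then
        # under a pure-Python 'fast_run' mode, for comparison.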
        print '================'
        print lrc.update.pretty()
        print '================'
        print lrc.update.pretty(mode = theano.Mode('py', 'fast_run'))
        print '================'
#         print lrc.update.pretty(mode = compile.FAST_RUN.excluding('inplace'))
#         print '================'

#        sys.exit(0)

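        # Compile the module (presumably 10 input features, 2 classes) with the
        # C-with-Python-fallback linker and 'fast_run' optimizations.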
        lr = lrc.make(10, 2, mode=theano.Mode('c|py', 'fast_run'))
        #lr = lrc.make(10, 2, mode=compile.FAST_RUN.excluding('fast_run'))
        #lr = lrc.make(10, 2, mode=theano.Mode('py', 'merge')) #'FAST_RUN')

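        # Synthetic data: 5 examples with 10 features each, and boolean (0/1)
        # class labels obtained by thresholding Gaussian noise.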
        data_x = N.random.randn(5, 10)
        data_y = (N.random.randn(5) > 0)

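        # Time 10000 update steps; the learning rate attribute is set to 0.02
        # on each iteration.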
        t = time.time()
        for i in xrange(10000):
            lr.lr = 0.02
            xe = lr.update(data_x, data_y)
            #if i % 100 == 0:
            #    print i, xe

        print 'training time:', time.time() - t
        print 'final error', xe

        #print
        #print 'TRAINED MODEL:'
        #print lr

    if 0:
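        # Disabled: the same exercise for what appears to be the binary
        # (two-class) logistic regression Module.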
        lrc = Module()

        lr = lrc.make(10, mode=theano.Mode('c|py', 'merge')) #'FAST_RUN')

        data_x = N.random.randn(5, 10)
        data_y = (N.random.randn(5, 1) > 0)

        for i in xrange(10000):
            xe = lr.update(data_x, data_y)
            if i % 100 == 0:
                print i, xe

        print
        print 'TRAINED MODEL:'
        print lr