Mercurial > pylearn
view algorithms/_test_logistic_regression.py @ 518:4aa7f74ea93f
init dataset
author | James Bergstra <bergstrj@iro.umontreal.ca> |
---|---|
date | Wed, 12 Nov 2008 12:36:09 -0500 |
parents | c7ce66b4e8f4 |
children |
line wrap: on
line source
# Manual smoke-test / benchmark script for the logistic-regression modules
# defined in logistic_regression.py (pylearn / Theano, Python 2 era).
# Not an automated unit test: it prints compiled-graph dumps and timing to
# stdout and asserts nothing.
#
# NOTE(review): the star-import below is what brings Module, Module_Nclass,
# theano, nnet, pprint, printing, and N into scope — presumably N is numpy
# and pprint/printing are Theano's pretty-printing helpers; confirm against
# logistic_regression.py.
from logistic_regression import *
import sys, time

if __name__ == '__main__':
    # Register human-readable names for the crossentropy/softmax ops so the
    # graph pretty-prints below show 'xsoftmaxdx' / 'nll' / 'softmax' /
    # 'argmax' instead of raw op reprs.
    pprint.assign(nnet.crossentropy_softmax_1hot_with_bias_dx, printing.FunctionPrinter('xsoftmaxdx'))
    pprint.assign(nnet.crossentropy_softmax_argmax_1hot_with_bias, printing.FunctionPrinter('nll', 'softmax', 'argmax'))

    # --- Section 1 (enabled): multi-class model (Module_Nclass) -----------
    if 1:
        lrc = Module_Nclass()

        # Dump the update function's graph twice: once with the default mode
        # and once under a pure-Python 'fast_run'-optimized mode, separated
        # by banner lines for easy visual diffing.
        print '================'
        print lrc.update.pretty()
        print '================'
        print lrc.update.pretty(mode = theano.Mode('py', 'fast_run'))
        print '================'
        # print lrc.update.pretty(mode = compile.FAST_RUN.excluding('inplace'))
        # print '================'
        # sys.exit(0)

        # Instantiate with 10 input features and 2 classes, compiled with the
        # C-with-Python-fallback linker ('c|py') — TODO confirm the (10, 2)
        # argument meaning against Module_Nclass.make.
        lr = lrc.make(10, 2, mode=theano.Mode('c|py', 'fast_run'))
        #lr = lrc.make(10, 2, mode=compile.FAST_RUN.excluding('fast_run'))
        #lr = lrc.make(10, 2, mode=theano.Mode('py', 'merge')) #'FAST_RUN')

        # Tiny random dataset: 5 examples of 10 features; binary 0/1 targets
        # derived from the sign of a standard normal draw.
        data_x = N.random.randn(5, 10)
        data_y = (N.random.randn(5) > 0)

        # Benchmark 10000 training steps; lr.lr is (re)set to the learning
        # rate 0.02 on every iteration.
        t = time.time()
        for i in xrange(10000):
            lr.lr = 0.02
            xe = lr.update(data_x, data_y)
            #if i % 100 == 0:
            #    print i, xe
        print 'training time:', time.time() - t
        print 'final error', xe

        #print
        #print 'TRAINED MODEL:'
        #print lr

    # --- Section 2 (disabled): binary model (Module) ----------------------
    # Same shape of experiment as above but with the plain Module class,
    # column-vector targets, and per-100-step progress printing. Flip the
    # `if 0` to `if 1` to run it.
    if 0:
        lrc = Module()
        lr = lrc.make(10, mode=theano.Mode('c|py', 'merge')) #'FAST_RUN')

        data_x = N.random.randn(5, 10)
        data_y = (N.random.randn(5, 1) > 0)

        for i in xrange(10000):
            xe = lr.update(data_x, data_y)
            if i % 100 == 0:
                print i, xe
        print
        print 'TRAINED MODEL:'
        print lr