#!/usr/bin/python
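"""Smoke tests for pylearn's stacked denoising autoencoder (DAA) models.

Each test builds a small models.Stacker of SigmoidXEDenoisingAA layers,
sets a corruption (noise) level per layer, runs greedy layer-wise local
updates, performs a supervised update, and prints the model's output on
two toy input patterns.
"""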

from pylearn import algorithms as models
import theano
import numpy
import time

import pylearn.algorithms.logistic_regression

def test_train_daa(mode = theano.Mode('c|py', 'fast_run')):

    ndaa = 3
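    # Stack ndaa denoising autoencoder layers (named 'hidden') with a binary regressor output layer on top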
    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(models.BinRegressor, 'output')],
                         regularize = False)

    model = daa.make([4, 20, 20, 20, 1],
                     lr = 0.01,
                     mode = mode,
                     seed = 10)

    # Set the input corruption (noise) level used when pre-training each denoising layer
    model.layers[0].noise_level = 0.3
    model.layers[1].noise_level = 0.3
    model.layers[2].noise_level = 0.3

    # Greedy layer-wise pre-training: run unsupervised local updates on each hidden layer in turn
    for l in range(3):
        for i in range(10):
            model.local_update[l]([[0, 1, 0, 1]])
            model.local_update[l]([[1, 0, 1, 0]])

    # A single supervised fine-tuning pass, then print the classifier's predictions
    for i in range(1):
        model.update([[0, 1, 0, 1]], [[1]])
        model.update([[1, 0, 1, 0]], [[0]])
    print model.classify([[0, 1, 0, 1]])
    print model.classify([[1, 0, 1, 0]])


def test_train_daa2(mode = theano.Mode('c|py', 'fast_run')):

    ndaa = 3
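    # Same denoising stack, but topped with an N-class logistic regression module ('pred') instead of a binary regressor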
    daa = models.Stacker([(models.SigmoidXEDenoisingAA, 'hidden')] * ndaa + [(pylearn.algorithms.logistic_regression.Module_Nclass, 'pred')],
                         regularize = False)

    model = daa.make([4, 20, 20, 20, 10],
                     lr = 0.01,
                     mode = mode,
                     seed = 10)

    # Set the input corruption (noise) level used when pre-training each denoising layer
    model.layers[0].noise_level = 0.3
    model.layers[1].noise_level = 0.3
    model.layers[2].noise_level = 0.3

    # Greedy layer-wise pre-training, as in test_train_daa
    for l in range(3):
        for i in range(10):
            model.local_update[l]([[0, 1, 0, 1]])
            model.local_update[l]([[1, 0, 1, 0]])

    # A single supervised pass with integer class targets, then print the model's output for both patterns
    for i in range(1):
        model.update([[0, 1, 0, 1]], [1])
        model.update([[1, 0, 1, 0]], [0])
    print model.apply([[0, 1, 0, 1]])
    print model.apply([[1, 0, 1, 0]])




if __name__ == '__main__':
#    print 'optimized:'
#    t1 = test_train_daa(theano.Mode('py', 'fast_compile'))
#    t1 = test_train_daa(theano.Mode('c|py', 'fast_run'))
#    print 'time:',t1
#    print

#    print 'not optimized:'
#    t2 = test_train_daa(theano.Mode('c|py', 'fast_compile'))
#    print 'time:',t2

#    test_train_daa(theano.compile.Mode('c&py', 'merge'))
#    test_train_daa(theano.compile.Mode('c|py', 'merge'))
    test_train_daa(theano.compile.Mode('py', 'merge'))

    test_train_daa2(theano.compile.Mode('c|py', 'merge'))