view pylearn/shared/layers/tests/test_sigmoidal_layer.py @ 893:912be602c3ac

fix import for the move of shared variables out of the sandbox.
author: Frederic Bastien <nouiz@nouiz.org>
date: Wed, 27 Jan 2010 09:56:37 -0500
parents: 580087712f69
children: fbe470217937
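# Sanity test (summary of what the code below checks): a tanh hidden layer
# (SigmoidalLayer) feeding a LogisticRegression output layer, trained by plain
# SGD on a single random minibatch, should drive the summed negative
# log-likelihood down and move the hidden-layer parameters away from their
# initial values.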

import numpy
from pylearn.shared.layers import SigmoidalLayer, LogisticRegression
from theano import tensor
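# shared and pfunc now live in theano.compile; shared variables moved out of
# theano.sandbox (this is the import the changeset message above refers to).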
from theano.compile import shared, pfunc

def test_w_random(dtype='float64'):
    if dtype == 'float64':
        x = tensor.dmatrix()
    else:
        x = tensor.fmatrix()
    y = tensor.lvector()
    rng = numpy.random.RandomState(23455)

    bsize = 10    # minibatch size
    n_in = 10     # input dimensionality
    n_hid = 12    # hidden units
    n_out = 2     # output classes
    n_iter = 100  # training iterations

    layer = SigmoidalLayer.new(rng, x, n_in, n_hid, squash_fn='tanh', dtype=dtype)
    out = LogisticRegression.new(layer.output, n_hid, n_out)
    cost = out.nll(y).sum()
    params = out.params + layer.params
    # plain SGD: p <- p - 0.01 * dcost/dp for every parameter of both layers
    updates = [(p, p - numpy.asarray(0.01, dtype=dtype) * gp)
               for p, gp in zip(params, tensor.grad(cost, params))]
    # each call to f returns the batch NLL, then applies one SGD step in place
    f = pfunc([x, y], cost, updates=updates)

    # snapshot the initial hidden-layer parameters so we can check they change
    w0 = layer.w.value.copy()
    b0 = layer.b.value.copy()

    xval = numpy.asarray(rng.rand(bsize, n_in), dtype=dtype)
    yval = numpy.asarray(rng.randint(0, n_out, bsize), dtype='int64')
    f0 = f(xval, yval)
    for i in xrange(n_iter):
        fN = f(xval, yval)
        print i, 'rval', fN

    # the summed NLL starts near bsize*log(2) (about 6.9) for an untrained
    # 2-class model and should drop well below 2 after training
    assert f0 > 6
    assert fN < 2

    # training must have actually updated the hidden-layer parameters
    assert numpy.all(w0 != layer.w.value)
    assert numpy.all(b0 != layer.b.value)
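
# Optional entry point (a sketch, not part of the test itself): run the check
# directly for both dtypes; normally nose collects the test_* functions. The
# float32 call assumes the fmatrix branch above trains the same way as the
# default float64 path.
if __name__ == '__main__':
    test_w_random()
    test_w_random(dtype='float32')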