pylearn/shared/layers/sigmoidal_layer.py @ 891:fc9779dcd710

changeset: some backport to python 2.4
author:    Frederic Bastien <nouiz@nouiz.org>
date:      Tue, 05 Jan 2010 10:48:59 -0500
parents:   580087712f69
children:  912be602c3ac
""" Provide the "normal" sigmoidal layers for making multi-layer perceptrons / neural nets """ import logging import numpy import theano from theano import tensor from theano.compile.sandbox import shared, pfunc from pylearn.shared.layers.util import update_locals, add_logging from pylearn.shared.layers.squash import squash class SigmoidalLayer(object): def __init__(self, input, w, b, squash_fn, params): """ :param input: a symbolic tensor of shape (n_examples, n_in) :param w: a symbolic weight matrix of shape (n_in, n_out) :param b: symbolic bias terms of shape (n_out,) :param squash: an squashing function """ output = squash_fn(tensor.dot(input, w) + b) update_locals(self, locals()) @classmethod def new(cls, rng, input, n_in, n_out, squash_fn=tensor.tanh, dtype=None): """Allocate a SigmoidLayer with weights to transform inputs with n_in dimensions, to outputs of n_out dimensions. Weights are initialized randomly using rng. :param squash_fn: an op constructor function, or a string that has been registed as a `squashing_function`. :param dtype: the numerical type to use for the parameters (i.e. 'float32', 'float64') """ if dtype is None: dtype = input.dtype cls._debug('allocating weights and biases', n_in, n_out, dtype) w = shared( numpy.asarray( rng.uniform(low=-2/numpy.sqrt(n_in), high=2/numpy.sqrt(n_in), size=(n_in, n_out)), dtype=dtype)) b = shared(numpy.asarray(numpy.zeros(n_out), dtype=dtype)) return cls(input, w, b, squash(squash_fn), [w,b]) add_logging(SigmoidalLayer)