# algorithms/logistic_regression.py
# revision 492:6dfdcee64e9b -- Olivier Breuleux <breuleuo@iro.umontreal.ca>, 2008-10-28

import theano
from theano import tensor as T
from theano.tensor import nnet_ops
from theano.compile import module
from theano import printing, pprint
from theano import compile

import numpy as N


class Module_Nclass(module.FancyModule):
    class InstanceType(module.FancyModuleInstance):
        def initialize(self, n_in, n_out, rng=N.random):
            # self.component is the Module_Nclass instance that built this one.
            # Note: rng is accepted but currently unused; the weights start at
            # zero, which is fine because the loss is convex.
            self.w = N.zeros((n_in, n_out))
            self.b = N.zeros(n_out)
            self.lr = 0.01
            self.__hide__ = ['params']

    def __init__(self, x=None, targ=None, w=None, b=None, lr=None, regularize=False):
        # note: regularize is accepted but not used anywhere below
        super(Module_Nclass, self).__init__() #boilerplate

        self.x = x if x is not None else T.matrix()
        self.targ = targ if targ is not None else T.lvector()

        self.w = w if w is not None else module.Member(T.dmatrix())
        self.b = b if b is not None else module.Member(T.dvector())
        self.lr = lr if lr is not None else module.Member(T.dscalar())

        # Only free inputs (variables with no owner, i.e. not computed from
        # other variables) are treated as trainable parameters.
        self.params = [p for p in [self.w, self.b] if p.owner is None]

        # crossentropy_softmax_1hot returns, per row of the logits, the
        # cross-entropy against the integer target and the softmax output.
        xent, y = nnet_ops.crossentropy_softmax_1hot(
                T.dot(self.x, self.w) + self.b, self.targ)
        sum_xent = T.sum(xent)
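
        # Standard result (what T.grad below derives symbolically): with
        # p = softmax(x.w + b) and one-hot target t, the gradient of the
        # cross-entropy w.r.t. the logits is p - t, so
        #   d(sum_xent)/dw = x^T (p - t),  d(sum_xent)/db = sum over rows of (p - t).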

        self.y = y
        self.sum_xent = sum_xent
        self.cost = sum_xent

        # define the apply method: predict the class with the highest score
        self.pred = T.argmax(T.dot(self.x, self.w) + self.b, axis=1)
        self.apply = module.Method([self.x], self.pred)

        # if this module has any internal parameters, define an update method
        # that performs one step of gradient descent on the summed cross-entropy
        if self.params:
            gparams = T.grad(sum_xent, self.params)

            self.update = module.Method([self.x, self.targ], sum_xent,
                    updates=dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))
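
# Untested usage sketch for Module_Nclass. It assumes the old
# theano.compile.module API, in which make() compiles the module and forwards
# its arguments to InstanceType.initialize:
#
#   classifier = Module_Nclass().make(n_in=5, n_out=3)
#   x = N.random.randn(4, 5)                 # minibatch of 4 examples
#   targ = N.random.randint(0, 3, size=4)    # integer class labels
#   for i in xrange(100):
#       cost = classifier.update(x, targ)    # one SGD step, returns sum_xent
#   print classifier.apply(x)                # predicted class per example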

class Module(module.FancyModule):
    class InstanceType(module.FancyModuleInstance):
        def initialize(self, n_in):
            # self.component is the Module instance that built this one.

            self.w = N.random.randn(n_in, 1)
            self.b = N.random.randn(1)
            self.lr = 0.01
            self.__hide__ = ['params']

    def __init__(self, x=None, targ=None, w=None, b=None, lr=None, regularize=False):
        # note: regularize is accepted but not used anywhere below
        super(Module, self).__init__() #boilerplate

        self.x = x if x is not None else T.matrix()
        self.targ = targ if targ is not None else T.lcol()

        self.w = w if w is not None else module.Member(T.dmatrix())
        self.b = b if b is not None else module.Member(T.dvector())
        self.lr = lr if lr is not None else module.Member(T.dscalar())

        self.params = [p for p in [self.w, self.b] if p.owner is None]

        # include the bias in the model: without it, self.b gets no gradient
        # even though both self.params and the prediction rule below use it
        y = nnet_ops.sigmoid(T.dot(self.x, self.w) + self.b)
        # binary cross-entropy (negative Bernoulli log-likelihood)
        xent = -self.targ * T.log(y) - (1.0 - self.targ) * T.log(1.0 - y)
        sum_xent = T.sum(xent)
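
        # Standard result: with y = sigmoid(x.w + b) and target t,
        # d(xent)/d(logit) = y - t, so d(sum_xent)/dw = x^T (y - t)
        # and d(sum_xent)/db = sum(y - t); T.grad below derives the same.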

        self.y = y
        self.xent = xent
        self.sum_xent = sum_xent
        self.cost = sum_xent

        # define the apply method: thresholding the logit at 0 is equivalent
        # to thresholding the sigmoid output at 0.5
        self.pred = (T.dot(self.x, self.w) + self.b) > 0.0
        self.apply = module.Method([self.x], self.pred)

        #if this module has any internal parameters, define an update function for them
        if self.params:
            gparams = T.grad(sum_xent, self.params)
            self.update = module.Method([self.x, self.targ], sum_xent,
                                        updates = dict((p, p - self.lr * g) for p, g in zip(self.params, gparams)))
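
# A matching untested sketch for the binary Module (same assumption about
# make() forwarding its arguments to initialize):
#
#   classifier = Module().make(n_in=5)
#   x = N.random.randn(4, 5)
#   targ = N.random.randint(0, 2, size=(4, 1))  # 0/1 labels, column to match T.lcol()
#   for i in xrange(100):
#       cost = classifier.update(x, targ)
#   print classifier.apply(x)                   # boolean predictions, via logit > 0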



class Learner(object):
    """TODO: Encapsulate the algorithm for finding an optimal regularization coefficient"""
    pass
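
# Hypothetical sketch of what Learner might encapsulate (not in the original
# code): choose the regularization coefficient with the lowest validation cost.
#
#   def best_coefficient(train_fn, valid_cost_fn, coefficients):
#       # train_fn(c) trains a model with coefficient c and returns it;
#       # valid_cost_fn(model) scores it on held-out data
#       return min(coefficients, key=lambda c: valid_cost_fn(train_fn(c)))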