view gradient_learner.py @ 13:633453635d51

Starting to work on gradient_based_learner.py
author bengioy@bengiomac.local
date Wed, 26 Mar 2008 21:38:08 -0400
parents
children 5ede27026e05


from learner import *
from tensor import *
import gradient
from compile import Function
from gradient_based_optimizer import *

class GradientLearner(Learner):
    """
    Generic Learner for gradient-based optimization of a training criterion
    that can consist of two parts: an additive part over the examples, and
    an example-independent part (usually called the regularizer).
    The user provides a Theano formula that maps the fields of a training example
    and parameters to output fields (for the use function), one of which must be a cost
    that is the training criterion to be minimized. The user also provides
    a GradientBasedOptimizer that implements the optimization strategy.
    The inputs, parameters, and outputs are lists of Theano tensors,
    while the example_wise_cost and regularization_term are Theano tensors.
    The user can specify a regularization coefficient that multiplies the regularization term.
    The training algorithm looks for parameters that minimize
       regularization_coefficient * regularization_term(parameters) +
       sum_{inputs in training_set} example_wise_cost(inputs,parameters)
    i.e. the regularization_term should not depend on the inputs, only on the parameters.
    The learned function can map a subset of the inputs to a subset of the outputs (as long as the input subset
    includes all the inputs required in the Theano expressions for the selected outputs).
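
    Example (a hypothetical usage sketch, not part of this revision: the tensor
    constructors and the names x, y, w, b, prediction below are illustrative
    assumptions; only the GradientLearner signature comes from this file):

        x, y = matrix('x'), matrix('y')        # fields of a training example
        w, b = matrix('w'), row('b')           # parameters to be learned
        prediction = dot(x, w) + b             # an output field
        cost = sum((prediction - y) ** 2)      # example_wise_cost over the example fields
        regularizer = sum(w * w)               # regularization_term (parameters only)
        learner = GradientLearner(inputs=[x, y], parameters=[w, b],
                                  outputs=[prediction, cost],
                                  example_wise_cost=cost,
                                  regularization_term=regularizer)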
    """
    def __init__(self, inputs, parameters, outputs, example_wise_cost, regularization_term,
                 gradient_based_optimizer=StochasticGradientDescent(), regularization_coefficient=astensor(1.0)):
        self.inputs = inputs
        self.outputs = outputs
        self.parameters = parameters
        self.example_wise_cost = example_wise_cost
        self.regularization_term = regularization_term
        self.gradient_based_optimizer = gradient_based_optimizer
        self.regularization_coefficient = regularization_coefficient
        # Symbolic gradient of the per-example cost with respect to the parameters.
        self.parameters_example_wise_gradient = gradient.grad(example_wise_cost, parameters)
        # Symbolic gradient of the scaled regularization term with respect to the parameters.
        self.parameters_regularization_gradient = gradient.grad(self.regularization_coefficient * regularization_term, parameters)

#    def update(self,training_set):