# HG changeset patch
# User James Bergstra
# Date 1282310919 14400
# Node ID 97f883b2cf784fb9a5d6498e3c829288ff59eac0
# Parent  8e78afa74313c8a6be452c6db2590f587c26b44d
Removed algorithms/minimizer - old and useless code.

diff -r 8e78afa74313 -r 97f883b2cf78 pylearn/algorithms/minimizer.py
--- a/pylearn/algorithms/minimizer.py	Fri Aug 20 09:27:59 2010 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,36 +0,0 @@
-"""Define the interface and factory for gradient-based minimizers.
-"""
-import theano
-
-class DummyMinimizer(theano.Module):
-    """ The idea of a minimizer is that it provides an `step` function that will
-    eventually converge toward (maybe realize?) the minimum of a cost function.
-
-    The step_cost function takes a step and returns the cost associated with either
-    the current or previous parameter values (return whichever is easiest to compute, it's
-    meant for user feedback.)
-
-    """
-    def __init__(self, args, cost, parameters, gradients=None):
-        super(DummyMinimizer, self).__init__()
-
-    def _instance_step(self, obj, *args):
-        """Move the parameters toward the minimum of a cost
-
-        :param args: The arguments here should be values for the Variables that were in the
-            `args` argument to the constructor.
-
-        :Return: None
-        """
-        pass
-
-    def _instance_step_cost(self, obj, *args):
-        """Move the parameters toward the minimum of a cost, and compute the cost
-
-        :param args: The arguments here should be values for the Variables that were in the
-            `args` argument to the constructor.
-
-        :Return: The current cost value.
-        """
-        pass
-
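
For reference, the docstring of the removed DummyMinimizer describes the minimizer contract: `step` moves the parameters toward the cost minimum and returns nothing, while `step_cost` does the same and also returns a cost value for user feedback. The following is a minimal, hypothetical sketch of that contract in plain NumPy; it is not the removed pylearn code, it does not use the long-deprecated theano.Module API, and the class name SimpleSGDMinimizer, its constructor arguments, and the learning-rate default are illustrative assumptions only.

    # Hypothetical sketch (not part of the patch): a minimizer exposing the
    # step/step_cost interface described in the removed DummyMinimizer docstring.
    import numpy as np

    class SimpleSGDMinimizer(object):
        """Plain gradient descent: `step` updates the parameters,
        `step_cost` does the same and also returns the cost."""

        def __init__(self, cost_fn, grad_fn, parameters, lr=0.01):
            # cost_fn(params, *args) -> scalar cost
            # grad_fn(params, *args) -> gradient array, same shape as params
            self.cost_fn = cost_fn
            self.grad_fn = grad_fn
            self.parameters = np.asarray(parameters, dtype=float)
            self.lr = lr

        def step(self, *args):
            """Move the parameters one step toward the minimum; returns None."""
            self.parameters -= self.lr * self.grad_fn(self.parameters, *args)

        def step_cost(self, *args):
            """Take one step and return the cost at the updated parameters."""
            self.step(*args)
            return self.cost_fn(self.parameters, *args)

    # Usage example: minimize the quadratic cost ||p - target||^2.
    if __name__ == "__main__":
        target = np.array([1.0, -2.0])
        cost = lambda p: float(np.sum((p - target) ** 2))
        grad = lambda p: 2.0 * (p - target)
        m = SimpleSGDMinimizer(cost, grad, parameters=[0.0, 0.0], lr=0.1)
        for _ in range(100):
            c = m.step_cost()
        print(c, m.parameters)  # cost near 0, parameters near target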