# HG changeset patch
# User Olivier Breuleux
# Date 1228514720 18000
# Node ID 4e25f4e90c1ea7e74768e25a84ed9e126b0deb89
# Parent 9f5691d5587d7fd91baeb7b0b3f9114eb0f2b906
# Parent e06d5ef74e6f5979a6a0431b9dbf2fe362df983f
merge

diff -r 9f5691d5587d -r 4e25f4e90c1e pylearn/algorithms/rnn.py
--- a/pylearn/algorithms/rnn.py	Fri Dec 05 17:05:06 2008 -0500
+++ b/pylearn/algorithms/rnn.py	Fri Dec 05 17:05:20 2008 -0500
@@ -214,22 +214,22 @@
     LAG = 4
     y[LAG:] = x[:-LAG, 0:n_out]
 
-    minimizer_fn1 = make_minimizer('sgd', stepsize = 0.001, WEIRD_STUFF = False)
-    minimizer_fn2 = make_minimizer('sgd', stepsize = 0.001, WEIRD_STUFF = True)
+    minimizer_fn1 = make_minimizer('sgd', stepsize = 0.001)
+    minimizer_fn2 = make_minimizer('sgd', stepsize = 0.001)
     rnn_module1 = ExampleRNN(n_vis, n_hid, n_out, minimizer_fn1)
     rnn_module2 = ExampleRNN(n_vis, n_hid, n_out, minimizer_fn2)
 
     rnn1 = rnn_module2.make(mode='FAST_RUN')
     rnn2 = rnn_module1.make(mode='FAST_COMPILE')
 
-    topo1=rnn1.minimizer.step_cost.maker.env.toposort()
-    topo2=rnn2.minimizer.step_cost.maker.env.toposort()
     if 0:
+        topo1=rnn1.minimizer.step_cost.maker.env.toposort()
+        topo2=rnn2.minimizer.step_cost.maker.env.toposort()
         for i in range(len(topo1)):
             print '1',i, topo1[i]
             print '2',i, topo2[i]
 
-    niter=3
+    niter=50
     for i in xrange(niter):
         rnn1.minimizer.step(x, y)
         rnn2.minimizer.step(x, y)
@@ -243,5 +243,5 @@
 if __name__ == '__main__':
 #    from theano.tests import main
 #    main(__file__)
-#    test_example_rnn()
+    test_example_rnn()
     test_WEIRD_STUFF()
diff -r 9f5691d5587d -r 4e25f4e90c1e pylearn/algorithms/sgd.py
--- a/pylearn/algorithms/sgd.py	Fri Dec 05 17:05:06 2008 -0500
+++ b/pylearn/algorithms/sgd.py	Fri Dec 05 17:05:20 2008 -0500
@@ -8,13 +8,12 @@
 
 class StochasticGradientDescent(module.FancyModule):
     """Fixed stepsize gradient descent"""
-    def __init__(self, args, cost, params, gradients=None, stepsize=None, WEIRD_STUFF=True):
+    def __init__(self, args, cost, params, gradients=None, stepsize=None):
         """
         :param stepsize: the step to take in (negative) gradient direction
         :type stepsize: None, scalar value, or scalar TensorResult
         """
         super(StochasticGradientDescent, self).__init__()
-        self.WEIRD_STUFF = WEIRD_STUFF
         self.stepsize_init = None
 
         if stepsize is None:
@@ -22,12 +21,7 @@
         elif isinstance(stepsize, T.TensorResult):
             self.stepsize = stepsize
         else:
-            if self.WEIRD_STUFF:
-                #TODO: why is this necessary? why does the else clause not work?
-                self.stepsize = module.Member(T.dscalar())
-                self.stepsize_init = stepsize
-            else:
-                self.stepsize = module.Member(T.value(stepsize))
+            self.stepsize = module.Member(T.value(stepsize))
 
         if self.stepsize.ndim != 0:
             raise ValueError('stepsize must be a scalar', stepsize)
@@ -44,10 +38,7 @@
                 args, cost,
                 updates=self.updates)
 
     def _instance_initialize(self, obj):
-        if self.WEIRD_STUFF:
-            obj.stepsize = self.stepsize_init
-        else:
-            pass
+        pass
 
 @minimizer_factory('sgd')
diff -r 9f5691d5587d -r 4e25f4e90c1e pylearn/algorithms/tests/test_logistic_regression.py
--- a/pylearn/algorithms/tests/test_logistic_regression.py	Fri Dec 05 17:05:06 2008 -0500
+++ b/pylearn/algorithms/tests/test_logistic_regression.py	Fri Dec 05 17:05:20 2008 -0500
@@ -1,4 +1,4 @@
-from logistic_regression import *
+from pylearn.algorithms.logistic_regression import *
 import sys, time
 
 if __name__ == '__main__':
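
Note on the sgd.py change: with the WEIRD_STUFF branch removed, a numeric stepsize always takes the single code path module.Member(T.value(stepsize)), so _instance_initialize no longer needs to copy stepsize_init onto the instance. A minimal usage sketch of the post-patch interface follows (a sketch only, against the 2008-era pylearn/Theano API; the make_minimizer import path is an assumption, since rnn.py does not show where it is imported from):

    # Sketch, not part of the patch. Assumes make_minimizer is exposed by
    # pylearn.algorithms.minimizer alongside the @minimizer_factory registry.
    from pylearn.algorithms.minimizer import make_minimizer

    # A plain float stepsize is now wrapped uniformly as a shared scalar
    # via module.Member(T.value(stepsize)); the WEIRD_STUFF keyword is gone,
    # matching the updated call sites in pylearn/algorithms/rnn.py.
    minimizer_fn = make_minimizer('sgd', stepsize=0.001)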