annotate algorithms/sgd.py @ 524:317a052f9b14
better main, allow to debug in a debugger.
author |
Frederic Bastien <bastienf@iro.umontreal.ca> |
date |
Fri, 14 Nov 2008 16:46:03 -0500 |
parents |
fbfd3932fd00 |
children |
|
rev |
line source |
477
James Bergstra <bergstrj@iro.umontreal.ca>
parents:
diff
changeset
|
1
|
James Bergstra <bergstrj@iro.umontreal.ca>
parents:
diff
changeset
|
2 from theano.compile import module
|
479
|
3 from theano import tensor as T
|
477
James Bergstra <bergstrj@iro.umontreal.ca>
parents:
diff
changeset
|
4
|
James Bergstra <bergstrj@iro.umontreal.ca>
parents:
diff
changeset
|
class StochasticGradientDescent(module.FancyModule):
    """Theano module expressing one step of stochastic gradient descent.

    For every (param, gparam) pair supplied, records the symbolic update
    ``param - lr * gparam`` in ``self.updates``.
    """

    def __init__(self, params, gparams, lr=None):
        """Build the SGD update dictionary.

        :param params: sequence of parameter Variables to be updated.
        :param gparams: sequence of gradient Variables, aligned one-to-one
            with ``params``.
        :param lr: learning-rate Variable; when ``None``, a fresh
            ``module.Member(T.dscalar())`` is created so the rate becomes
            a free input of the module.
        """
        super(StochasticGradientDescent, self).__init__()

        # Fall back to a free scalar learning-rate input when none is given.
        if lr is None:
            lr = module.Member(T.dscalar())
        self.lr = lr

        self.params = params
        self.gparams = gparams

        # One SGD step per parameter: p <- p - lr * dp
        self.updates = dict(
            (param, param - self.lr * gparam)
            for param, gparam in zip(self.params, self.gparams)
        )