comparison deep/convolutional_dae/sgd_opt.py @ 279:206374eed2fb

Merge
author fsavard
date Wed, 24 Mar 2010 14:36:55 -0400
parents 727ed56fad12
children 80ee63c3e749
comparison
equal deleted inserted replaced
278:43afd29f3dbd 279:206374eed2fb
1 import time
2 import sys
3
def sgd_opt(train, valid, test, training_epochs=10000, patience=10000,
            patience_increase=2., improvement_threshold=0.995,
            validation_frequency=None):
    """Generic SGD training driver with patience-based early stopping.

    Calls ``train()`` once per epoch, evaluates ``valid()`` every
    ``validation_frequency`` epochs, and on each new best validation loss
    evaluates ``test()``.  Training stops when ``patience`` (measured in
    epochs) is exhausted or ``training_epochs`` is reached.

    Parameters
    ----------
    train : callable
        Runs one training epoch; return value is ignored.
    valid : callable
        Returns the current validation loss (lower is better).
    test : callable
        Returns the test loss; only invoked when validation improves.
    training_epochs : int
        Hard upper bound on the number of epochs.
    patience : int
        Minimum number of epochs to look at before stopping early.
    patience_increase : float
        Factor by which patience is extended on a significant improvement.
    improvement_threshold : float
        A loss must drop below ``best * improvement_threshold`` to count
        as significant enough to extend patience.
    validation_frequency : int or None
        Validate every this many epochs; defaults to ``patience // 2``.

    Returns
    -------
    (float, float)
        ``(best_validation_loss, test_score)``; ``test_score`` stays 0.0
        if validation never improved.
    """
    if validation_frequency is None:
        # // keeps the frequency an integer under Python 3 division, and
        # max(..., 1) guards patience < 2, which would otherwise make the
        # modulo below divide by zero.
        validation_frequency = max(1, patience // 2)

    best_validation_loss = float('inf')
    test_score = 0.

    # time.time() replaces time.clock() (deprecated, removed in Python 3.8);
    # wall-clock time is what the final "ran for N minutes" report intends.
    # The original also assigned start_time twice; only one is kept.
    start_time = time.time()

    for epoch in range(1, training_epochs + 1):
        train()

        if epoch % validation_frequency == 0:
            this_validation_loss = valid()
            print('epoch %i, validation error %f %%' %
                  (epoch, this_validation_loss * 100.))

            # if we got the best validation score until now
            if this_validation_loss < best_validation_loss:

                # improve patience if loss improvement is good enough
                if this_validation_loss < \
                        best_validation_loss * improvement_threshold:
                    patience = max(patience, epoch * patience_increase)

                # save best validation score
                best_validation_loss = this_validation_loss

                # test it on the test set
                test_score = test()
                print(('     epoch %i, test error of best model %f %%') %
                      (epoch, test_score * 100.))

        if patience <= epoch:
            break

    end_time = time.time()
    print(('Optimization complete with best validation score of %f %%,'
           'with test performance %f %%') %
          (best_validation_loss * 100., test_score * 100.))
    print('The code ran for %f minutes' % ((end_time - start_time) / 60.))

    return best_validation_loss, test_score
52