diff deep/stacked_dae/v_sylvain/sgd_optimization.py @ 455:09e1c5872c2b

Add three lines of code to compute the standard error
author SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca>
date Wed, 26 May 2010 20:23:02 -0400
parents 5e11dda78995
children 78ed4628071d
line wrap: on
line diff
--- a/deep/stacked_dae/v_sylvain/sgd_optimization.py	Thu May 13 12:15:16 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/sgd_optimization.py	Wed May 26 20:23:02 2010 -0400
@@ -395,6 +395,7 @@
                 self.classifier.params[idx].value=copy(x)
 
     def training_error(self,dataset,part=0):
+        import math
         # create a function to compute the mistakes that are made by the model
         # on the validation set, or testing set
         test_model = \
@@ -415,6 +416,8 @@
         train_score2 = numpy.mean(train_losses2)
         print 'On the ' + name + 'dataset'
         print(('\t the error is %f')%(train_score2*100.))
+        stderr = math.sqrt(train_score2-train_score2**2)/math.sqrt(len(train_losses2)*self.hp.minibatch_size)
+        print (('\t the stderr is %f')%(stderr*100.))
     
     #To see the prediction of the model, the real answer and the image to judge    
     def see_error(self, dataset):
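
The added lines treat the mean error rate as a binomial proportion: with mean p over n examples, the variance is p*(1-p) = p - p**2 and the standard error is sqrt(p - p**2)/sqrt(n). Below is a minimal standalone sketch of that computation, assuming (as in the patch) that the losses are per-minibatch mean error rates and that the total sample count is len(losses) * minibatch_size; the helper name proportion_stderr is illustrative, not from the repository.

    import math

    def proportion_stderr(losses, minibatch_size):
        # mean error rate over all minibatches
        p = sum(losses) / float(len(losses))
        # total number of examples seen
        n = len(losses) * minibatch_size
        # binomial-proportion standard error: sqrt(p*(1-p)/n)
        return math.sqrt(p - p ** 2) / math.sqrt(n)

    # example: 10 minibatches of 32 examples each, roughly 12% error
    losses = [0.12, 0.10, 0.15, 0.11, 0.13, 0.12, 0.14, 0.10, 0.12, 0.11]
    print('stderr in percent: %f' % (proportion_stderr(losses, 32) * 100.0))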