Mercurial > ift6266
diff deep/stacked_dae/v_sylvain/sgd_optimization.py @ 235:ecb69e17950b
correction de bugs
author | SylvainPL <sylvain.pannetier.lebeuf@umontreal.ca> |
---|---|
date | Sun, 14 Mar 2010 20:25:12 -0400 |
parents | 02ed13244133 |
children | 9fc641d7adda |
line wrap: on
line diff
--- a/deep/stacked_dae/v_sylvain/sgd_optimization.py	Sun Mar 14 15:17:04 2010 -0400
+++ b/deep/stacked_dae/v_sylvain/sgd_optimization.py	Sun Mar 14 20:25:12 2010 -0400
@@ -118,9 +118,11 @@
         # go through pretraining epochs
         for epoch in xrange(self.hp.pretraining_epochs_per_layer):
             # go through the training set
+            batch_index=int(0)
             for x,y in dataset.train(self.hp.minibatch_size):
                 c = self.classifier.pretrain_functions[i](x)
-
+                batch_index+=1
+                self.series["reconstruction_error"].append((epoch, batch_index), c)

             print 'Pre-training layer %i, epoch %d, cost '%(i,epoch),c
@@ -140,6 +142,8 @@
         #index = T.lscalar()    # index to a [mini]batch
         minibatch_size = self.hp.minibatch_size
+        ensemble_x = T.matrix('ensemble_x')
+        ensemble_y = T.ivector('ensemble_y')

         # create a function to compute the mistakes that are made by the model
         # on the validation set, or testing set