changeset 303:ef28cbb5f464
Use sigmoids with cross-entropy cost in the ConvAutoencoders.
author   | Arnaud Bergeron <abergeron@gmail.com>
date     | Wed, 31 Mar 2010 15:54:47 -0400
parents  | 1adfafdc3d57
children | 1e4bf5a5b46d
files    | deep/convolutional_dae/scdae.py
diffstat | 1 files changed, 4 insertions(+), 2 deletions(-)
--- a/deep/convolutional_dae/scdae.py	Tue Mar 30 14:40:54 2010 -0400
+++ b/deep/convolutional_dae/scdae.py	Wed Mar 31 15:54:47 2010 -0400
@@ -14,6 +14,8 @@
                            num_filt=num_filt,
                            num_in=num_in,
                            noisyness=corruption,
+                           err=errors.cross_entropy,
+                           nlin=nlins.sigmoid,
                            dtype=dtype),
              MaxPoolLayer(subsampling)])
@@ -201,9 +203,9 @@
     pretrain_funcs, trainf, evalf, net = build_funcs(
         img_size = (32, 32),
         batch_size=batch_size, filter_sizes=[(5,5), (3,3)],
-        num_filters=[4, 4], subs=[(2,2), (2,2)], noise=[0.2, 0.2],
+        num_filters=[12, 4], subs=[(2,2), (2,2)], noise=[0.2, 0.2],
         mlp_sizes=[500], out_size=10, dtype=numpy.float32,
-        pretrain_lr=0.01, train_lr=0.1)
+        pretrain_lr=0.001, train_lr=0.1)
     t_it = repeat_itf(dset.train, batch_size)
     pretrain_fs, train, valid, test = massage_funcs(
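
The change passes `err=errors.cross_entropy` and `nlin=nlins.sigmoid` to the autoencoder layers, i.e. sigmoid reconstructions scored with a per-pixel cross-entropy cost instead of the previous defaults. The sketch below illustrates that pairing in plain numpy; the `sigmoid` and `cross_entropy` functions and the tied-weight toy encoder/decoder are illustrative stand-ins, not the repository's `nlins`/`errors` modules or its `ConvAutoencoder` class.

```python
import numpy as np

def sigmoid(x):
    # Logistic nonlinearity: squashes reconstructions into (0, 1).
    return 1.0 / (1.0 + np.exp(-x))

def cross_entropy(x, z):
    # Binary cross-entropy between input x and reconstruction z, both in [0, 1];
    # summed over pixels, averaged over the minibatch.
    eps = 1e-7  # guard against log(0)
    z = np.clip(z, eps, 1.0 - eps)
    return -np.mean(np.sum(x * np.log(z) + (1.0 - x) * np.log(1.0 - z), axis=1))

# Toy usage: denoise a corrupted batch through one tied-weight layer pair.
rng = np.random.RandomState(0)
x = rng.uniform(size=(8, 32 * 32)).astype(np.float32)   # batch of "images" in [0, 1]
noisy = x * (rng.uniform(size=x.shape) > 0.2)            # zero-out corruption, rate 0.2
W = rng.normal(scale=0.01, size=(32 * 32, 500)).astype(np.float32)
b, b_prime = np.zeros(500, np.float32), np.zeros(32 * 32, np.float32)
h = sigmoid(noisy.dot(W) + b)                             # encoder
z = sigmoid(h.dot(W.T) + b_prime)                         # decoder (tied weights)
print("reconstruction cost:", cross_entropy(x, z))
```

With sigmoid outputs, each reconstructed pixel can be read as a Bernoulli probability, so cross-entropy is the matching cost for inputs scaled to [0, 1]. The diff also raises the first layer's filter count from 4 to 12 and lowers the pretraining learning rate from 0.01 to 0.001.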