# HG changeset patch
# User Frederic Bastien
# Date 1212509643 14400
# Node ID 9502f100eda50dc624f1636415ff4c148233a6e8
# Parent c702abb7f87557ae97a5576f46d6850623a57d72
# Parent 0fb75fdd727d3f2599e54c5437ed422b710700d5
Automated merge with ssh://projects@lgcm.iro.umontreal.ca/hg/pylearn

diff -r c702abb7f875 -r 9502f100eda5 test_dataset.py
--- a/test_dataset.py	Mon Jun 02 17:09:58 2008 -0400
+++ b/test_dataset.py	Tue Jun 03 12:14:03 2008 -0400
@@ -500,38 +500,52 @@
     ds = ArrayDataSet(a2,{'all':slice(0,a2.shape[1],1)})
     mat = numpy.random.rand(400,100)
     @print_timing
-    def f_array1(a):
+    def f_array_full(a):
         a+1
     @print_timing
-    def f_array2(a):
+    def f_array_index(a):
         for id in range(a.shape[0]):
 #            pass
             a[id]+1
 #            a[id]*mat
     @print_timing
-    def f_ds(ds):
+    def f_array_iter(a):
+        for r in a:
+#            pass
+            r+1
+#            r*mat
+    @print_timing
+    def f_ds_index(ds):
+        for id in range(len(ds)):
+#            pass
+            ds[id][0]+1
+#            ds[id][0]*mat
+    @print_timing
+    def f_ds_iter(ds):
         for ex in ds:
 #            pass
             ex[0]+1
-#            a[id]*mat
+#            a[0]*mat
     @print_timing
     def f_ds_mb1(ds,mb_size):
         for exs in ds.minibatches(minibatch_size = mb_size):
             for ex in exs:
 #                pass
                 ex[0]+1
-#                ex[id]*mat
+#                ex[0]*mat
     @print_timing
     def f_ds_mb2(ds,mb_size):
         for exs in ds.minibatches(minibatch_size = mb_size):
 #            pass
             exs[0]+1
-#            ex[id]*mat
+#            ex[0]*mat
 
-    f_array1(a2)
-    f_array2(a2)
+    f_array_full(a2)
+    f_array_index(a2)
+    f_array_iter(a2)
 
-    f_ds(ds)
+    f_ds_index(ds)
+    f_ds_iter(ds)
     f_ds_mb1(ds,10)
     f_ds_mb1(ds,100)
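
Note: the benchmark functions in this hunk are wrapped with a print_timing decorator that test_dataset.py defines elsewhere; it is not part of this changeset. A minimal sketch, assuming the decorator simply runs the wrapped callable once and prints its wall-clock time (only the name print_timing comes from the patch; the implementation below is a guess):

import time
import numpy

def print_timing(func):
    # Hypothetical sketch: call `func` once and print the elapsed wall-clock time.
    def wrapper(*args, **kwargs):
        start = time.time()
        result = func(*args, **kwargs)
        print('%s took %.3f s' % (func.__name__, time.time() - start))
        return result
    return wrapper

# Usage mirroring the patch: decorate a benchmark body, then call it on an array.
@print_timing
def f_array_full(a):
    a+1

f_array_full(numpy.random.rand(400,100))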