# HG changeset patch
# User Olivier Delalleau
# Date 1243445120 14400
# Node ID d4e703a617baca96b209660374bc51b0d24672aa
# Parent 71a052a92b054fb9da507f323aa98749fca03df5
Renamed test_speed.py into check_speed.py so that nosetests does not run it

diff -r 71a052a92b05 -r d4e703a617ba pylearn/sandbox/check_speed.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylearn/sandbox/check_speed.py	Wed May 27 13:25:20 2009 -0400
@@ -0,0 +1,89 @@
+import numpy, time
+from pylearn.datasets import *
+#from misc import *
+from pylearn.old_dataset.dataset import ArrayDataSet, CachedDataSet
+from pylearn.old_dataset.lookup_list import LookupList
+
+def print_timing(f):
+    def new_f(*lst, **kw):
+        start = time.time()
+        f(*lst, **kw)
+        print "Time elapsed: %s" % (time.time() - start)
+    return new_f
+
+def run(array, ds):
+    print "test_speed", ds.__class__
+
+    mat = numpy.random.rand(400,100)
+
+    @print_timing
+    def f_array_full(a):
+        a+1
+    @print_timing
+    def f_array_index(a):
+        for id in range(a.shape[0]):
+#            pass
+            a[id]+1
+#            a[id]*mat
+    @print_timing
+    def f_array_iter(a):
+        for r in a:
+#            pass
+            r+1
+#            r*mat
+    @print_timing
+    def f_ds_index(ds):
+        for id in range(len(ds)):
+#            pass
+            ds[id][0]+1
+#            ds[id][0]*mat
+    @print_timing
+    def f_ds_iter(ds):
+        for ex in ds:
+#            pass
+            ex[0]+1
+#            a[0]*mat
+    @print_timing
+    def f_ds_mb1(ds,mb_size):
+        for exs in ds.minibatches(minibatch_size = mb_size):
+            for ex in exs:
+#                pass
+                ex[0]+1
+#                ex[0]*mat
+
+    f_array_full(array)
+    f_array_index(array)
+    f_array_iter(array)
+
+    f_ds_index(ds)
+    f_ds_iter(ds)
+
+    f_ds_mb1(ds,10)
+    f_ds_mb1(ds,100)
+    f_ds_mb1(ds,1000)
+    f_ds_mb1(ds,10000)
+
+# Subset of tests when run by nosetests.
+def run_test():
+    a2 = numpy.random.rand(100000,400)
+    ds1 = ArrayDataSet(a2,{'all':slice(0,a2.shape[1],1)})
+    run(a2, ds1)
+    return a2, ds1
+
+# Full set of tests when run from command line.
+def run_full():
+    a2, ds1 = run_test()
+    a1 = numpy.random.rand(100000,40)
+    ds4 = ArrayDataSet(a1,LookupList(["f"+str(x)for x in range(a1.shape[1])],
+                                     range(a1.shape[1])))
+    run(a2,ds4)
+    ds2=CachedDataSet(ds1,cache_all_upon_construction=False)
+    run(a2,ds2)
+    del ds2
+    ds3=CachedDataSet(ds1,cache_all_upon_construction=True)
+    run(a2,ds3)
+    del a2,ds1,ds3
+
+if __name__=='__main__':
+    run_full()
+
diff -r 71a052a92b05 -r d4e703a617ba pylearn/sandbox/test_speed.py
--- a/pylearn/sandbox/test_speed.py	Wed May 27 12:08:18 2009 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,89 +0,0 @@
-import numpy, time
-from pylearn.datasets import *
-#from misc import *
-from pylearn.old_dataset.dataset import ArrayDataSet, CachedDataSet
-from pylearn.old_dataset.lookup_list import LookupList
-
-def print_timing(f):
-    def new_f(*lst, **kw):
-        start = time.time()
-        f(*lst, **kw)
-        print "Time elapsed: %s" % (time.time() - start)
-    return new_f
-
-def run(array, ds):
-    print "test_speed", ds.__class__
-
-    mat = numpy.random.rand(400,100)
-
-    @print_timing
-    def f_array_full(a):
-        a+1
-    @print_timing
-    def f_array_index(a):
-        for id in range(a.shape[0]):
-#            pass
-            a[id]+1
-#            a[id]*mat
-    @print_timing
-    def f_array_iter(a):
-        for r in a:
-#            pass
-            r+1
-#            r*mat
-    @print_timing
-    def f_ds_index(ds):
-        for id in range(len(ds)):
-#            pass
-            ds[id][0]+1
-#            ds[id][0]*mat
-    @print_timing
-    def f_ds_iter(ds):
-        for ex in ds:
-#            pass
-            ex[0]+1
-#            a[0]*mat
-    @print_timing
-    def f_ds_mb1(ds,mb_size):
-        for exs in ds.minibatches(minibatch_size = mb_size):
-            for ex in exs:
-#                pass
-                ex[0]+1
-#                ex[0]*mat
-
-    f_array_full(array)
-    f_array_index(array)
-    f_array_iter(array)
-
-    f_ds_index(ds)
-    f_ds_iter(ds)
-
-    f_ds_mb1(ds,10)
-    f_ds_mb1(ds,100)
-    f_ds_mb1(ds,1000)
-    f_ds_mb1(ds,10000)
-
-# Subset of tests when run by nosetests.
-def run_test():
-    a2 = numpy.random.rand(100000,400)
-    ds1 = ArrayDataSet(a2,{'all':slice(0,a2.shape[1],1)})
-    run(a2, ds1)
-    return a2, ds1
-
-# Full set of tests when run from command line.
-def run_full():
-    a2, ds1 = run_test()
-    a1 = numpy.random.rand(100000,40)
-    ds4 = ArrayDataSet(a1,LookupList(["f"+str(x)for x in range(a1.shape[1])],
-                                     range(a1.shape[1])))
-    run(a2,ds4)
-    ds2=CachedDataSet(ds1,cache_all_upon_construction=False)
-    run(a2,ds2)
-    del ds2
-    ds3=CachedDataSet(ds1,cache_all_upon_construction=True)
-    run(a2,ds3)
-    del a2,ds1,ds3
-
-if __name__=='__main__':
-    run_full()
-
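Note on the rename: nose collects test modules and functions by name, so moving this
benchmark out of a test_* module name keeps it from running on every nosetests pass,
while it stays runnable by hand (running pylearn/sandbox/check_speed.py as a script
calls run_full() through the __main__ block). The sketch below is illustration only,
not part of the patch; the regex is an assumption taken from nose's documented default
testMatch setting and may differ between nose versions.

    # Illustration (assumed nose default): why a module named test_speed is
    # collected by nose while one named check_speed is skipped.
    import re

    TEST_MATCH = re.compile(r'(?:^|[\b_\.-])[Tt]est')  # assumed nose testMatch default

    for module_name in ('test_speed', 'check_speed'):
        print('%-12s collected by nose: %s'
              % (module_name, bool(TEST_MATCH.search(module_name))))

    # Expected output under the assumed pattern:
    #   test_speed   collected by nose: True
    #   check_speed  collected by nose: False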