changeset 988:fd243cb2bf0b

mcRBM - moved imports and the demo patch loader to the top of the file
author James Bergstra <bergstrj@iro.umontreal.ca>
date Tue, 24 Aug 2010 14:02:29 -0400
parents 043aa1b748a7
children 9e753ddcc320
files pylearn/algorithms/mcRBM.py
diffstat 1 files changed, 23 insertions(+), 14 deletions(-)
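Before the diff itself, a brief sketch of the pattern the relocated loader follows after this change: the demo .mat file is resolved under pylearn.datasets.config.data_root() instead of a hard-coded user path, and the loader is wrapped in pylearn's @memo decorator so the file is parsed only once. The standalone code below is a hedged illustration of that pattern, not pylearn's own implementation; the PYLEARN_DATA_ROOT environment variable and functools.lru_cache are stand-ins whose behaviour is only assumed to match data_root() and pylearn.dataset_ops.memo.memo.

# Hypothetical, self-contained sketch of the load_mcRBM_demo_patches pattern.
# PYLEARN_DATA_ROOT and lru_cache stand in for pylearn.datasets.config.data_root()
# and pylearn.dataset_ops.memo.memo; their exact semantics are assumed here.
import os
import functools

import scipy.io


@functools.lru_cache(maxsize=None)   # cache the result so the .mat file is read once
def load_demo_patches(data_root=None):
    if data_root is None:
        data_root = os.environ.get('PYLEARN_DATA_ROOT', '.')
    path = os.path.join(data_root, 'image_patches', 'mcRBM',
                        'training_colorpatches_16x16_demo.mat')
    d = scipy.io.loadmat(path)
    # 'whitendata' holds one whitened 16x16 colour patch per row
    return d['whitendata'].copy()
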
--- a/pylearn/algorithms/mcRBM.py	Tue Aug 24 13:54:29 2010 -0400
+++ b/pylearn/algorithms/mcRBM.py	Tue Aug 24 14:02:29 2010 -0400
@@ -194,13 +194,24 @@
 import logging
 import numpy as np
 import numpy
+
+import theano
 from theano import function, shared, dot
 from theano import tensor as TT
 floatX = theano.config.floatX
 
+import pylearn
 from pylearn.sampling.hmc import HMC_sampler
 from pylearn.io import image_tiling
 
+#TODO: This should be in the datasets folder
+import pylearn.datasets.config
+from pylearn.dataset_ops.protocol import TensorFnDataset
+from pylearn.dataset_ops.memo import memo
+import pylearn
+import scipy.io
+import os
+
 
 #TODO: This should be in the nnet part of the library
 def sgd_updates(params, grads, lr):
@@ -212,6 +223,17 @@
     updates = [(p, p - plr * gp) for (plr, p, gp) in zip(lr, params, grads)]
     return updates
 
+@memo
+def load_mcRBM_demo_patches():
+    d = scipy.io.loadmat(os.path.join(pylearn.datasets.config.data_root(),'image_patches', 'mcRBM', 'training_colorpatches_16x16_demo.mat'))
+    totnumcases = d["whitendata"].shape[0]
+    #d = d["whitendata"][0:np.floor(totnumcases/batch_size)*batch_size,:].copy() 
+    d = d["whitendata"].copy()
+    return d
+
+
+
+
 # this is a little hack, probably should be removed
 # The logic about casting things to shared vars is busted anyway (wrt pickling)
 def as_shared(x, name=None, dtype=floatX):
@@ -355,19 +377,6 @@
         rval[1] = rval[1] - TT.sign(self.W)*W_l1_penalty
         return rval
 
-from pylearn.dataset_ops.protocol import TensorFnDataset
-from pylearn.dataset_ops.memo import memo
-import pylearn
-import scipy.io
-import os
-@memo
-def load_mcRBM_demo_patches():
-    d = scipy.io.loadmat('/u/bergstrj/cvs/articles/2010/spike_slab_RBM/src/marcaurelio/training_colorpatches_16x16_demo.mat')
-    totnumcases = d["whitendata"].shape[0]
-    #d = d["whitendata"][0:np.floor(totnumcases/batch_size)*batch_size,:].copy() 
-    d = d["whitendata"].copy()
-    return d
-
 
 if __name__ == '__main__':
 
@@ -377,7 +386,7 @@
     if dataset == 'MAR':
         R,C= 21,5
         n_patches=10240
-        demodata = scipy.io.loadmat('/u/bergstrj/cvs/articles/2010/spike_slab_RBM/src/marcaurelio/training_colorpatches_16x16_demo.mat')
+        demodata = scipy.io.loadmat(os.path.join(pylearn.datasets.config.data_root(),'image_patches', 'mcRBM', 'training_colorpatches_16x16_demo.mat'))
     else:
         R,C= 16,16 # the size of image patches
         n_patches=100000
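
A rough usage note on the sgd_updates helper shown near the top of the diff: it pairs each shared parameter with its gradient and a learning rate to produce the updates list that theano.function consumes. The sketch below assumes one learning rate per parameter, matching the zip(lr, params, grads) comprehension visible in the hunk; the elided part of the function may also accept a scalar rate, and the cost expression here is invented purely for illustration.

# Hypothetical usage sketch for sgd_updates; the quadratic cost and the data
# shapes are invented for illustration only.
import numpy as np
import theano
from theano import tensor as TT

from pylearn.algorithms.mcRBM import sgd_updates

W = theano.shared(np.zeros((16, 4)), name='W')
b = theano.shared(np.zeros(4), name='b')

x = TT.matrix('x')
cost = TT.sum((TT.dot(x, W) + b) ** 2)   # placeholder cost, not the mcRBM energy
grads = TT.grad(cost, [W, b])

# one learning rate per parameter, matching the zip(lr, params, grads) form above
updates = sgd_updates(params=[W, b], grads=grads, lr=[0.01, 0.01])

train = theano.function([x], cost, updates=updates)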