changeset 820:2333cd78f574

Bug fixes for DAA input groups
author Xavier Glorot <glorotxa@iro.umontreal.ca>
date Wed, 02 Sep 2009 18:28:37 -0400
parents 7dfecf11cbf4
children f1a29c772210
files pylearn/algorithms/sandbox/DAA_inputs_groups.py
diffstat 1 files changed, 3 insertions(+), 4 deletions(-)
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Wed Sep 02 14:23:50 2009 -0700
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Wed Sep 02 18:28:37 2009 -0400
@@ -94,7 +94,7 @@
     return theano.tensor.tanh(x/2.0)
 
 def softsign_act(x):
-    return x/(1.0 + theano.tensor.abs(x))
+    return x/(1.0 + T.abs_(x))
 
 # costs utils:---------------------------------------------------
 # in order to fix numerical instability of the cost and gradient calculation for the cross entropy we calculate it
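
This hunk replaces theano.tensor.abs, which Theano does not define (the op carries a trailing underscore, abs_, to avoid shadowing the Python builtin), with T.abs_ via the file's existing import theano.tensor as T alias. A minimal numpy sketch of the same softsign activation, runnable without Theano, with np.abs standing in for T.abs_:

    import numpy as np

    def softsign_act(x):
        # squash x smoothly into (-1, 1); like tanh, but with
        # polynomial rather than exponential tails
        return x / (1.0 + np.abs(x))

    print softsign_act(np.array([-10.0, 0.0, 10.0]))  # [-0.909... 0. 0.909...]
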
@@ -113,7 +113,7 @@
     return -T.mean(T.sum(XE-XS, axis=sum_axis),axis=mean_axis)
 
 def softsign_cross_entropy(target, output_act, mean_axis, sum_axis):
-    newact = ((output_act/(1.0 + theano.tensor.abs(output_act)))+1)/2.0
+    newact = ((output_act/(1.0 + T.abs_(output_act)))+1)/2.0
     XE = (target+1)/2.0 * T.log(newact) + (1 - (target+1)/2.0) * T.log(1 - newact)
     XS = T.xlogx.xlogx((target+1)/2.0) + T.xlogx.xlogx(1-(target+1)/2.0)
     return -T.mean(T.sum(XE-XS, axis=sum_axis),axis=mean_axis)
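
The same abs_ fix lands in softsign_cross_entropy which, per the comment above, builds the cross entropy directly from the raw activation: the softsign is recomputed inside the function, and both it and the target are rescaled from (-1, 1) into (0, 1) before the logs are taken. A rough numpy sketch of the arithmetic, assuming 2-D (batch, feature) arrays with sum_axis=1 and mean_axis=0, and a small xlogx helper standing in for T.xlogx.xlogx (0*log(0) taken as 0):

    import numpy as np

    def xlogx(p):
        # p * log(p) with the 0 * log(0) = 0 convention
        return np.where(p > 0, p * np.log(np.where(p > 0, p, 1.0)), 0.0)

    def softsign_cross_entropy(target, output_act, mean_axis=0, sum_axis=1):
        # recompute the softsign from the raw activation and map its
        # (-1, 1) range onto probabilities in (0, 1)
        newact = ((output_act / (1.0 + np.abs(output_act))) + 1.0) / 2.0
        p = (target + 1.0) / 2.0                # targets rescaled the same way
        XE = p * np.log(newact) + (1.0 - p) * np.log(1.0 - newact)
        XS = xlogx(p) + xlogx(1.0 - p)          # entropy of the target itself
        return -np.mean(np.sum(XE - XS, axis=sum_axis), axis=mean_axis)
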
@@ -547,8 +547,7 @@
         print '\trec_fn = ', rec_fn
         print '\tact_reg = ', act_reg
         print '\treconstruction_cost_function = ', reconstruction_cost_function
-        print '\tblockgrad = ', blockgrad
-        print '\tact_reg = ', act_reg
+        print '\tblockgrad = ', self.blockgrad
         print '\tn_out = ', self.n_out
         
         # init for model construction