# HG changeset patch
# User Xavier Glorot
# Date 1251930517 14400
# Node ID 2333cd78f5745918a85ed41b4bdcd2a325d28f79
# Parent  7dfecf11cbf416b0e03a88cb2eb8c4b2fc84b8f8
bug fixes DAA input groups

diff -r 7dfecf11cbf4 -r 2333cd78f574 pylearn/algorithms/sandbox/DAA_inputs_groups.py
--- a/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Wed Sep 02 14:23:50 2009 -0700
+++ b/pylearn/algorithms/sandbox/DAA_inputs_groups.py	Wed Sep 02 18:28:37 2009 -0400
@@ -94,7 +94,7 @@
     return theano.tensor.tanh(x/2.0)
 
 def softsign_act(x):
-    return x/(1.0 + theano.tensor.abs(x))
+    return x/(1.0 + T.abs_(x))
 
 # costs utils:---------------------------------------------------
 # in order to fix numerical instability of the cost and gradient calculation for the cross entropy we calculate it
@@ -113,7 +113,7 @@
     return -T.mean(T.sum(XE-XS, axis=sum_axis),axis=mean_axis)
 
 def softsign_cross_entropy(target, output_act, mean_axis, sum_axis):
-    newact = ((output_act/(1.0 + theano.tensor.abs(output_act)))+1)/2.0
+    newact = ((output_act/(1.0 + T.abs_(output_act)))+1)/2.0
     XE = (target+1)/2.0 * T.log(newact) + (1 - (target+1)/2.0) * T.log(1 - newact)
     XS = T.xlogx.xlogx((target+1)/2.0) + T.xlogx.xlogx(1-(target+1)/2.0)
     return -T.mean(T.sum(XE-XS, axis=sum_axis),axis=mean_axis)
@@ -547,8 +547,7 @@
        print '\trec_fn = ', rec_fn
        print '\tact_reg = ', act_reg
        print '\treconstruction_cost_function = ', reconstruction_cost_function
-       print '\tblockgrad = ', blockgrad
-       print '\tact_reg = ', act_reg
+       print '\tblockgrad = ', self.blockgrad
        print '\tn_out = ', self.n_out
 
        # init for model construction
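
Note on the abs fix: Theano names its elementwise absolute-value op `abs_` (with a trailing underscore, to avoid shadowing Python's builtin `abs`), so the old `theano.tensor.abs(...)` call raises an AttributeError at graph-construction time. A minimal sketch of the corrected softsign activation, assuming a Theano environment with the module's usual `import theano.tensor as T` alias (the variable name and test values below are illustrative only):

    import theano
    import theano.tensor as T

    # symbolic input vector (hypothetical example, not from the patch)
    x = T.dvector('x')

    # the fixed softsign activation from the patch: x / (1 + |x|)
    softsign = x / (1.0 + T.abs_(x))

    f = theano.function([x], softsign)
    print f([-2.0, 0.0, 2.0])   # approximately [-0.6667, 0., 0.6667]

The same `T.abs_` substitution is applied inside `softsign_cross_entropy`, where the softsign output is rescaled to (0, 1) before computing the cross-entropy. The third hunk is an unrelated logging cleanup: it drops a duplicated `act_reg` print and reads `blockgrad` from `self`, where it is actually stored.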