view misc_theano.py @ 451:d99fefbc9324

Added a KL-divergence.
author Joseph Turian <turian@gmail.com>
date Thu, 04 Sep 2008 14:46:30 -0400
parents 2480024bf401
children
line wrap: on
line source


import theano

class Print(theano.Op):
    """Identity Op that prints its input each time the graph is evaluated.

    The printed line is ``message`` followed by a space and the value,
    exactly as the classic ``print a, b`` statement would show it.  The
    output is declared as a *view* of the input (``view_map``), so the
    value passes through without being copied.
    """

    def __init__(self, message=""):
        # Prefix text printed before the value on every evaluation.
        self.message = message
        # Output 0 aliases input 0 -- tells Theano this Op makes no copy.
        self.view_map = {0: [0]}

    def make_node(self, xin):
        # The output has exactly the same type as the input (identity op).
        # NOTE(review): ``type.make_result()`` is the old Theano API for
        # this era of the library; newer Theano spells it ``xin.type()``.
        # Kept as-is to match the Theano version this file targets.
        xout = xin.type.make_result()
        return theano.Apply(op=self, inputs=[xin], outputs=[xout])

    def perform(self, node, inputs, output_storage):
        xin, = inputs
        xout, = output_storage
        # Pass the value through unchanged (the printing is the side effect).
        xout[0] = xin
        # Py2/Py3-neutral form of the old ``print self.message, xin``
        # statement: identical output ("message value") on both, and no
        # longer a syntax error under Python 3.
        print("%s %s" % (self.message, xin))

    def grad(self, input, output_gradients):
        # Identity op: the gradient flows through unchanged.
        return output_gradients