import td  # assumed import: `td` supplies the (real, inft) component-pair arithmetic used below


def jsDivergence(targetReal, targetInft, predReal, predInft):
    # Pointwise Jensen-Shannon divergence; m is the mixture M = 0.5 * (target + pred).
    m = td.multiply(0.5, 0.0, *td.add(targetReal, targetInft, predReal, predInft))
    tpart = td.multiply(targetReal, targetInft, *td.log(*td.divide(targetReal, targetInft, *m)))
    ppart = td.multiply(predReal, predInft, *td.log(*td.divide(predReal, predInft, *m)))
    # JSD = 0.5 * KL(target || M) + 0.5 * KL(pred || M).
    return td.multiply(0.5, 0.0, *td.add(*tpart, *ppart))
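
# Hedged usage sketch (not in the original source): the calls above imply each
# td operation takes flattened (real, inft) component pairs and returns a
# (real, inft) tuple. Under that assumption, comparing two scalar masses is:
#
#     jsReal, jsInft = jsDivergence(0.4, 0.0, 0.6, 0.0)
#
# Summing the pointwise results over a distribution's support gives the full
# Jensen-Shannon divergence JSD(P, Q) = 0.5*KL(P||M) + 0.5*KL(Q||M),
# where M = 0.5*(P + Q).
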
def softmax(real, inft, t=1):
    # Temperature-scaled softmax over the last axis of a component-pair array.
    raw = td.exp(*td.divide(real, inft, t, 0.0))  # exp(x / t), elementwise
    cs = td.sum(*raw, -1)  # normalizer: sum along axis -1
    # Note: no max-subtraction, so very large inputs can overflow in td.exp.
    return td.divide(*raw, *cs)
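
# Hedged usage sketch: assuming td.exp and td.divide broadcast elementwise over
# arrays of components and td.sum reduces along the given axis, a tempered
# softmax over a logits array would be:
#
#     probsReal, probsInft = softmax(logitsReal, logitsInft, t=2.0)
#
# Higher t flattens the distribution; t=1 recovers the standard softmax
# exp(x_i) / sum_j exp(x_j).
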
def klDivergence(targetReal, targetInft, predReal, predInft):
    # Pointwise Kullback-Leibler term: target * log(target / pred).
    return td.multiply(targetReal, targetInft, *td.log(*td.divide(targetReal, targetInft, predReal, predInft)))
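
# Hedged usage sketch: like jsDivergence, klDivergence returns the pointwise
# term only; the full divergence KL(P||Q) = sum_i p_i * log(p_i / q_i) needs a
# reduction over the support, e.g. with the td.sum signature inferred from
# softmax above:
#
#     klReal, klInft = td.sum(*klDivergence(targetReal, targetInft,
#                                           predReal, predInft), -1)
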
def logistic(real, inft):
    # Logistic sigmoid 1 / (1 + exp(-x)); both components are negated to form exp(-x).
    return td.divide(1.0, 0.0, *td.add(1.0, 0.0, *td.exp(-real, -inft)))
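
# Hedged usage sketch: logistic maps any real input into (0, 1). Assuming td
# arithmetic mirrors ordinary real arithmetic on the real component:
#
#     yReal, yInft = logistic(0.0, 0.0)   # real part 1 / (1 + exp(0)) = 0.5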