import numpy as np

def softmax(a, b, alpha=1, normalize=0):
    """The soft maximum: softmax(a, b) = log(e^(alpha*a) + e^(alpha*b)) / alpha.
    normalize should be 0 if a or b could be negative; it can be 1.0
    (more accurate) if a and b are strictly positive.
    Also called the alpha-quasimax:
            J. Cook. Basic properties of the soft maximum.
            Working Paper Series 70, UT MD Anderson Cancer Center
            Department of Biostatistics, 2011.
            http://biostats.bepress.com/mdandersonbiostat/paper7
    """
    return np.log(np.exp(a * alpha) + np.exp(b * alpha) - normalize) / alpha
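# Usage sketch (illustrative, not from the original source): the soft maximum
# upper-bounds max(a, b), and larger alpha tightens the bound toward the hard
# maximum; normalize=1 shifts the curve so that softmax(0, 0) is exactly 0.
print(softmax(1.0, 2.0))               # ~2.3133, i.e. log(e**1 + e**2)
print(softmax(1.0, 2.0, alpha=10))     # ~2.0000, nearly max(1, 2)
print(softmax(1.0, 2.0, normalize=1))  # ~2.2091; with normalize, softmax(0, 0) == 0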
def softmax_smooth(a, b, smooth=0):
    """The smoothed soft maximum of softmax(a, b) = log(e^a + e^b).
    With smooth=0.0 this is softmax; with smooth=1.0 it averages a and b."""
    t = smooth / 2.0
    return np.log(np.exp((1 - t) * a + t * b) +
                  np.exp((1 - t) * b + t * a)) - np.log(1 + smooth)
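# Usage sketch (illustrative): smooth interpolates between the soft maximum
# and the plain average of a and b.
print(softmax_smooth(1.0, 2.0, smooth=0.0))  # ~2.3133, same as softmax(1, 2)
print(softmax_smooth(1.0, 2.0, smooth=1.0))  # 1.5, the mean of a and b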