def analytical_value_k_ejr1(distr1, distr2, u, par1, par2):
    """ Analytical value of the Jensen-Renyi kernel-1.

    Parameters
    ----------
    distr1, distr2 : str-s
                     Names of distributions.
    u    : float, >0
           Parameter of the Jensen-Renyi kernel-1 (alpha = 2: fixed).
    par1, par2 : dictionary-s
                 Parameters of distributions. If (distr1, distr2) =
                 ('normal', 'normal'), then both distributions are normal:
                 distr1 = N(m1,s1^2 I) with m1 = par1['mean'], s1 =
                 par1['std'], distr2 = N(m2,s2^2 I) with m2 =
                 par2['mean'], s2 = par2['std'].

    Returns
    -------
    k : float
        Analytical value of the Jensen-Renyi kernel-1.

    References
    ----------
    Fei Wang, Tanveer Syeda-Mahmood, Baba C. Vemuri, David Beymer, and
    Anand Rangarajan. Closed-Form Jensen-Renyi Divergence for Mixture of
    Gaussians and Applications to Group-Wise Shape Registration. Medical
    Image Computing and Computer-Assisted Intervention, 12: 648–655, 2009.
    
    """ 
    
    if distr1 == 'normal' and distr2 == 'normal':
        m1, s1 = par1['mean'], par1['std']
        m2, s2 = par2['mean'], par2['std']
        w = array([1/2, 1/2])
        h = compute_h2(w, (m1, m2), (s1, s2))  # quadratic Renyi entropy
        k = exp(-u * h)
    else:
        raise ValueError("(distr1, distr2) should be ('normal', 'normal'); "
                         "got (%s, %s)." % (distr1, distr2))
        
    return k
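

# The closed-form expressions in this section rely on a 'compute_h2' helper
# that is assumed to return the quadratic (alpha = 2) Renyi entropy of an
# isotropic Gaussian mixture sum_i w[i] N(m[i], s[i]^2 I). The function below
# is a minimal, hypothetical re-implementation of that quantity (it is not the
# library's own helper), based on the closed form
# H2 = -log( sum_i sum_j w[i] w[j] N(m[i]; m[j], (s[i]^2 + s[j]^2) I) ),
# i.e. -log of the integral of the squared mixture density (cf. the Wang et
# al. reference above).


def compute_h2_sketch(ws, ms, ss):
    """ Quadratic Renyi entropy of the mixture sum_i ws[i] N(ms[i], ss[i]^2 I).

    ws: mixture weights; ms: mean vectors (of a common dimension d);
    ss: standard deviations (floats).

    """

    from numpy import array, exp, log, pi
    from numpy.linalg import norm

    d = array(ms[0], ndmin=1).size  # dimension of the Gaussians
    i2 = 0  # \int p(x)^2 dx for the mixture density p
    for wi, mi, si in zip(ws, ms, ss):
        for wj, mj, sj in zip(ws, ms, ss):
            v = si**2 + sj**2  # variance of the convolved Gaussian pair
            diff = array(mi, ndmin=1) - array(mj, ndmin=1)
            # Gaussian density N(m_i; m_j, v I) in closed form:
            dens = (2 * pi * v)**(-d / 2) * exp(-norm(diff)**2 / (2 * v))
            i2 += wi * wj * dens

    return -log(i2)  # H2 = -log( \int p(x)^2 dx )

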
def analytical_value_d_jensen_renyi(distr1, distr2, w, par1, par2):
    """ Analytical value of the Jensen-Renyi divergence.

    Parameters
    ----------    
    distr1, distr2 : str-s
                     Names of distributions.
    w    : vector, w[i] > 0 (for all i), sum(w) = 1
           Weight vector used in the Jensen-Renyi divergence.
    par1, par2 : dictionary-s
                 Parameters of distributions. If (distr1, distr2) =
                 ('normal', 'normal'), then both distributions are normal:
                 distr1 = N(m1,s1^2 I) with m1 = par1['mean'], s1 =
                 par1['std'], distr2 = N(m2,s2^2 I) with m2 =
                 par2['mean'], s2 = par2['std'].

    Returns
    -------
    d : float
        Analytical value of the Jensen-Renyi divergence.
        
    References           
    ----------
    Fei Wang, Tanveer Syeda-Mahmood, Baba C. Vemuri, David Beymer, and
    Anand Rangarajan. Closed-Form Jensen-Renyi Divergence for Mixture of
    Gaussians and Applications to Group-Wise Shape Registration. Medical
    Image Computing and Computer-Assisted Intervention, 12: 648–655, 2009.
    
    """
    
    if distr1 == 'normal' and distr2 == 'normal':
        m1, s1 = par1['mean'], par1['std']
        m2, s2 = par2['mean'], par2['std']
        term1 = compute_h2(w, (m1, m2), (s1, s2))
        term2 = \
            w[0] * compute_h2((1,), (m1,), (s1,)) +\
            w[1] * compute_h2((1,), (m2,), (s2,))

        # H2(\sum_i w_i y_i) - \sum_i w_i H2(y_i), where H2 is the quadratic
        # Renyi entropy:
        d = term1 - term2

    else:
        raise ValueError("(distr1, distr2) should be ('normal', 'normal'); "
                         "got (%s, %s)." % (distr1, distr2))
        
    return d
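

# A hypothetical usage sketch for the divergence above; the parameter values
# are illustrative only, and the means are assumed to be numpy arrays of a
# common dimension with float standard deviations, matching the par1/par2
# convention documented in the docstring.


def _demo_d_jensen_renyi():
    """ Illustrative (non-library) call of the divergence above. """

    from numpy import array

    par1 = {'mean': array([0.0, 0.0]), 'std': 1.0}
    par2 = {'mean': array([1.0, 0.5]), 'std': 2.0}
    w = array([0.3, 0.7])  # w[i] > 0, sum(w) = 1

    # closed-form value of the Jensen-Renyi divergence (alpha = 2):
    return analytical_value_d_jensen_renyi('normal', 'normal', w, par1, par2)

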
def analytical_value_k_ejt2(distr1, distr2, u, par1, par2):
    """ Analytical value of the Jensen-Tsallis kernel-2.

    Parameters
    ----------
    distr1, distr2 : str-s
                     Names of distributions.
    u    : float, >0
           Parameter of the Jensen-Tsallis kernel-2 (alpha = 2: fixed).
    par1, par2 : dictionary-s
                 Parameters of distributions. If (distr1, distr2) =
                 ('normal', 'normal'), then both distributions are normal:
                 distr1 = N(m1,s1^2 I) with m1 = par1['mean'], s1 =
                 par1['std'], distr2 = N(m2,s2^2 I) with m2 =
                 par2['mean'], s2 = par2['std'].

    Returns
    -------
    k : float
        Analytical value of the Jensen-Tsallis kernel-2.

    References
    ----------
    Fei Wang, Tanveer Syeda-Mahmood, Baba C. Vemuri, David Beymer, and
    Anand Rangarajan. Closed-Form Jensen-Renyi Divergence for Mixture of
    Gaussians and Applications to Group-Wise Shape Registration. Medical
    Image Computing and Computer-Assisted Intervention, 12: 648–655, 2009.
    (The reference gives the analytical value of the Jensen-Renyi
    divergence, from which the Jensen-Tsallis case follows.)
    
    """ 
    
    if distr1 == 'normal' and distr2 == 'normal':
        m1, s1 = par1['mean'], par1['std']
        m2, s2 = par2['mean'], par2['std']
        w = array([1/2, 1/2])
        # quadratic Renyi entropy -> quadratic Tsallis entropy:
        term1 = 1 - exp(-compute_h2(w, (m1, m2), (s1, s2)))
        term2 = \
            w[0] * (1 - exp(-compute_h2((1, ), (m1, ), (s1,)))) +\
            w[1] * (1 - exp(-compute_h2((1,), (m2,), (s2,))))
        # T2(\sum_i w_i y_i) - \sum_i w_i T2(y_i), where T2 is the quadratic
        # Tsallis entropy:
        d = term1 - term2

        k = exp(-u * d)
    else:
        raise ValueError("(distr1, distr2) should be ('normal', 'normal'); "
                         "got (%s, %s)." % (distr1, distr2))
        
    return k
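

# A hypothetical usage sketch comparing the two kernels above on the same pair
# of normal distributions; the parameter values are illustrative only.


def _demo_jensen_kernels():
    """ Illustrative (non-library) calls of the two kernel functions above. """

    from numpy import array

    par1 = {'mean': array([0.0, 0.0]), 'std': 1.0}
    par2 = {'mean': array([1.0, 0.5]), 'std': 2.0}
    u = 0.8  # kernel parameter, u > 0

    # exp(-u * H2) with H2 the quadratic Renyi entropy of the (1/2, 1/2)
    # mixture of the two normals:
    k_ejr1 = analytical_value_k_ejr1('normal', 'normal', u, par1, par2)
    # exp(-u * d) with d the quadratic Jensen-Tsallis divergence of the same
    # mixture:
    k_ejt2 = analytical_value_k_ejt2('normal', 'normal', u, par1, par2)

    return k_ejr1, k_ejt2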