Example #1
def transfer_entropy(X, Y, lag=1):
    """
    Compute the raw transfer entropy from a source signal Y to a
    target signal X.

        te = transfer_entropy(X, Y, lag)

    INPUT:
      X:   target (points of the signal)
      Y:   source (points of the signal)
      lag: number of samples to shift X by to obtain the future series
    OUTPUT:
      te:  raw transfer entropy TE(Y -> X)
    """
    import numpy as np
    from CPAC.series_mod import cond_entropy
    from CPAC.series_mod import entropy

    # future of i (note: np.roll wraps the first `lag` samples around
    # to the end of the series)
    Fi = np.roll(X, -lag)
    # past of i
    Pi = X
    # past of j
    Pj = Y

    # Transfer entropy: H(Fi | Pi) - H(Fi | Pi, Pj)
    Inf_from_Pi_to_Fi = cond_entropy(Fi, Pi)

    # same as cond_entropy(Fi, Pi_Pj)
    Hy = entropy(Pi, Pj)
    Hyx = entropy(Fi, Pj, Pi)
    Inf_from_Pi_Pj_to_Fi = Hyx - Hy

    TE_from_j_to_i = Inf_from_Pi_to_Fi - Inf_from_Pi_Pj_to_Fi

    return TE_from_j_to_i
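
A minimal usage sketch for the function above. It assumes transfer_entropy is exposed from the same CPAC.series_mod module as the helpers it imports, and that the entropy estimators accept raw 1-D integer arrays; the signals below are hypothetical test data, not from the source.

import numpy as np
from CPAC.series_mod import transfer_entropy

rng = np.random.RandomState(42)
Y = rng.randint(0, 4, size=500)    # hypothetical discrete source signal
X = np.roll(Y, 1)                  # target that copies the source one sample late
te = transfer_entropy(X, Y, lag=1)
print(te)  # expected clearly positive: Y's past removes uncertainty about X's future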
Example #2
def mutual_information(X, Y):
    """
    Mutual information I(X;Y) = H(X) + H(Y) - H(X,Y)
    """
    from CPAC.series_mod import entropy

    Hx = entropy(X)
    Hy = entropy(Y)
    Hxy = entropy(X, Y)
    MI = Hx + Hy - Hxy

    return MI
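
A brief sanity check of the identity in the docstring, under the same assumptions about CPAC.series_mod; the binary signals are hypothetical.

import numpy as np
from CPAC.series_mod import mutual_information, entropy

rng = np.random.RandomState(0)
X = rng.randint(0, 2, size=1000)  # hypothetical binary signal
Y = rng.randint(0, 2, size=1000)  # generated independently of X
print(mutual_information(X, X), entropy(X))  # a signal shares all of its entropy with itself
print(mutual_information(X, Y))              # near 0 for independent signals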
Example #3
def cond_entropy(X, Y):
    """
    Conditional entropy H(X|Y) = H(Y,X) - H(Y), i.e. the entropy of X
    conditioned on Y (chain rule).
    """
    from CPAC.series_mod import entropy

    Hy = entropy(Y)
    Hyx = entropy(Y, X)
    CE = Hyx - Hy

    return CE
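
A short check of the chain-rule identity, again with hypothetical integer signals and the same module assumptions. Making X a deterministic function of Y should drive H(X|Y) to zero.

import numpy as np
from CPAC.series_mod import cond_entropy

rng = np.random.RandomState(1)
Y = rng.randint(0, 4, size=1000)  # hypothetical 4-level signal
X = Y % 2                         # X is fully determined by Y
print(cond_entropy(X, Y))         # ~0: knowing Y leaves no uncertainty about X
print(cond_entropy(Y, X))         # >0: X does not determine Y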
Example #4
def entropy_cc(X, Y):
    """
    Entropy correlation coefficient
    rho(H) = sqrt(I(X,Y) / (0.5 * (H(X) + H(Y))))
    """
    import numpy as np
    from CPAC.series_mod import entropy
    from CPAC.series_mod import mutual_information

    Hx = entropy(X)
    Hy = entropy(Y)
    Ixy = mutual_information(Y, X)  # mutual information is symmetric in its arguments
    ECC = np.sqrt(Ixy / (0.5 * (Hx + Hy)))

    return ECC
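
A quick illustration of the coefficient's range under the same assumptions; an identical pair should score near 1 and an independent pair near 0. The data are hypothetical.

import numpy as np
from CPAC.series_mod import entropy_cc

rng = np.random.RandomState(2)
X = rng.randint(0, 4, size=1000)  # hypothetical discrete signal
Y = rng.randint(0, 4, size=1000)  # generated independently of X
print(entropy_cc(X, X))           # ~1: perfect dependence
print(entropy_cc(X, Y))           # ~0: no shared information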