def calculate_correlation(timeseries_data):
    """
    Method to calculate the correlation between each voxel or node of the
    data present in the template.
    
    Parameters
    ----------
    timeseries_data : string (numpy matrix file)
        Path to file containing data matrix
    
    Returns
    -------
    corr_mat_file : string (numpy npy file)
        path to the file containing the correlation matrix
    
    """
    
    import os
    import numpy as np
    from CPAC.network_centrality import load_mat
    
    timeseries = load_mat(timeseries_data)
    r_matrix = np.corrcoef(timeseries)
    cwd = os.getcwd()
    
    print "shape of correlation matrix", r_matrix.shape
    
    corr_mat_file = os.path.join(cwd, 'r_matrix.npy')
    np.save(corr_mat_file, r_matrix)
    
    return corr_mat_file
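

# A minimal usage sketch, not part of the original module: the input path is
# hypothetical and assumes a nodes x timepoints matrix in a format that
# CPAC's load_mat can read.
def _example_calculate_correlation():
    import numpy as np
    corr_file = calculate_correlation('subject_timeseries.npy')  # hypothetical input file
    r_matrix = np.load(corr_file)  # square nodes x nodes correlation matrix
    print(r_matrix.shape)

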
def generate_adjacency_graph(correlation_matrix, threshold_matrix, 
                             weight_options, template_data, template_type):
    """
    Method to store the adjacency matrix as a compressed sparse matrix that
    can be loaded into MATLAB. The method also creates a PNG image of the
    graph.
    
    Parameters
    ----------
    correlation_matrix : string (numpy matrix file)
        path to correlation matrix file
    threshold_matrix : string (numpy matrix file)
        path to thresholded correlation matrix file
    weight_options : list of two booleans
        list of two booleans for the binarize and weighted options respectively
    template_data : string (numpy matrix file)
        path to file containing parcellation unit
    template_type : string
        0 for mask, 1 for parcellation unit
        
    Returns
    -------
    out_list : list of strings
        paths to the .mat file(s) containing the compressed sparse matrix and
        to the corresponding .png graph image(s)
            
    """
    from pylab import imsave
    import os
    from scipy.sparse import lil_matrix, csc_matrix
    from scipy.io import savemat
    from CPAC.network_centrality import load_mat
    
    out_list = []

    thresh_matrix = load_mat(threshold_matrix)
    corr_matrix = load_mat(correlation_matrix)
    
    if isinstance(template_data, list):
        mask_name = os.path.splitext(os.path.basename(template_data[0]))[0]
    else:
        mask_name = os.path.splitext(os.path.basename(template_data))[0]
    
    
    def save(filename, key, matrix):
        # Save the sparse matrix as a .mat file and a dense .png image of the
        # graph, and record both output paths in out_list.
        out_matrix = os.path.join(os.getcwd(), filename + ".mat")
        out_img = os.path.join(os.getcwd(), filename + ".png")
        savemat(out_matrix, {key: matrix})
        print(out_matrix)
        print(out_img)
        out_list.append(out_matrix)
        imsave(out_img, matrix.todense())
        out_list.append(out_img)
    
    try:
        
        if weight_options[0]:
            # binarized adjacency matrix: sparse representation of the
            # thresholded (0/1) correlation matrix
            spedgemat = lil_matrix(thresh_matrix)
            spcscmat = csc_matrix(spedgemat)
            del spedgemat
            filename = mask_name + "_adjacency_matrix"
            save(filename, 'unit_graph', spcscmat)
        
        if weight_options[1]:
            # weighted adjacency matrix: keep the correlation value on every
            # supra-threshold edge
            matrix = thresh_matrix * corr_matrix
            spedgemat = lil_matrix(matrix)
            spcscmat = csc_matrix(spedgemat)
            del spedgemat
            filename = mask_name + "_weighted_adjacency_matrix"
            save(filename, 'unit_graph', spcscmat)
            
    except Exception:
        print("Not enough memory available to generate the sparse matrix")
        raise
        
    
    return out_list
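

# A minimal usage sketch, not part of the original module: all file names here
# are hypothetical, and weight_options follows the [binarize, weighted]
# convention used throughout this module.
def _example_generate_adjacency_graph():
    outputs = generate_adjacency_graph(
        correlation_matrix='r_matrix.npy',        # from calculate_correlation
        threshold_matrix='threshold_matrix.npy',  # from threshold_rmatrix
        weight_options=[True, True],              # save both binarized and weighted graphs
        template_data='parcellation_unit.npy',    # hypothetical parcellation file
        template_type='1')                        # 1 -> parcellation unit
    for path in outputs:                          # alternating .mat and .png paths
        print(path)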
    
    
def threshold_rmatrix(corr_matrix, option, 
                      threshold, scans):
    
    """
    Method to threshold the correlation matrix based on one of three
    threshold options: p-value, sparsity, or a plain correlation threshold.
    It is a two-step process: first calculate the threshold value, then apply
    it to the correlation matrix.
    
    Parameters
    ----------
    corr_matrix : string (numpy npy file)
        path to file containing the correlation matrix
    option : string (int)
        threshold option: 0 for p-value, 1 for sparsity, any other value for a
        plain correlation threshold
    threshold : string (float)
        p-value/sparsity threshold/correlation threshold
    scans : string (int)
        Total number of scans in input data
        
    Returns
    -------
    threshold_file : string (numpy npy file)
        path to file containing the thresholded correlation matrix
    
    Raises
    ------
    Exception
    """
    
    import numpy as np
    import os
    from CPAC.network_centrality import load_mat,\
                                        convert_pvalue_to_r,\
                                        convert_sparsity_to_r
    
    try:
        r_matrix = load_mat(corr_matrix)
       
        print "threshold_option -->", option
        
        try:
            if option == 0:
                r_value = convert_pvalue_to_r(scans, threshold)
            elif option == 1:
                r_value = convert_sparsity_to_r(r_matrix, threshold)
            else:
                r_value = threshold
        except:
            print("Exception in calculating threshold value")
            raise

        print("correlation threshold value -> ", r_value)
        print("thresholding the correlation matrix....")
        
        threshold_matrix = r_matrix > r_value  # boolean mask of supra-threshold edges

        threshold_file = os.path.join(os.getcwd(), 'threshold_matrix.npy')
        np.save(threshold_file, threshold_matrix.astype(np.float64))
    
    except Exception:
        print "Exception while thresholding correlation matrix"
        raise
    
    return threshold_file
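

# A minimal usage sketch, not part of the original module: the input path and
# the numeric values are hypothetical, and the option codes follow the
# docstring above (0 -> p-value, 1 -> sparsity, anything else -> plain
# correlation threshold).
def _example_threshold_rmatrix():
    import numpy as np
    thresh_file = threshold_rmatrix('r_matrix.npy',  # from calculate_correlation
                                    option=2,        # use `threshold` directly as an r threshold
                                    threshold=0.5,
                                    scans=120)       # only used by the p-value option
    thresh_matrix = np.load(thresh_file)             # 0/1 matrix of supra-threshold edges
    print(thresh_matrix.sum())

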
def get_centrality_opt(timeseries_data,
                       method_options,
                       weight_options,
                       memory_allocated,
                       threshold,
                       scans,
                       r_value=None):
    """
    Method to calculate degree and eigenvector centrality.
    This method takes into consideration the amount of memory
    allocated by the user to calculate degree centrality.
    
    Parameters
    ----------
    timeseries_data : string (numpy npy file)
        path to file containing the timeseries of the input subject
    method_options : list of two booleans
        list of two booleans for the degree and eigenvector centrality options
        respectively
    weight_options : list of two booleans
        list of two booleans for the binarize and weighted options respectively
    memory_allocated : string
        amount of memory allocated to degree centrality
    threshold : float
        threshold value passed to calc_threshold when r_value is not given
    scans : integer
        number of scans in the subject
    r_value : float
        correlation threshold value; if None it is computed with calc_threshold
    
    Returns
    -------
    out_list : list of tuples
        list of tuples, each containing the output name used to store the
        nifti image for the centrality and the corresponding centrality matrix
    
    Raises
    ------
    Exception
    """

    import numpy as np
    from CPAC.network_centrality import load_mat,\
                                        calc_corrcoef,\
                                        calc_blocksize,\
                                        calc_eigenV,\
                                        calc_threshold
    #from scipy.sparse import dok_matrix

    try:
        out_list = []
        timeseries = load_mat(timeseries_data)
        shape = timeseries.shape
        try:
            block_size = calc_blocksize(shape, memory_allocated)
        except:
            raise Exception("Error in calculating block size")

        r_matrix = None

        if method_options[0]:  # degree centrality
            if weight_options[0]:
                degree_mat_binarize = np.zeros(shape[0], dtype=np.float32)
                out_list.append(
                    ('degree_centrality_binarize', degree_mat_binarize))

            if weight_options[1]:
                degree_mat_weighted = np.zeros(shape[0], dtype=np.float32)
                out_list.append(
                    ('degree_centrality_weighted', degree_mat_weighted))

        if method_options[1]:  # eigenvector centrality needs the full correlation matrix
            r_matrix = np.zeros((shape[0], shape[0]), dtype=np.float32)

        # j and i delimit the current block of rows [j, i) of the timeseries
        j = 0
        i = block_size

        while i <= timeseries.shape[0]:

            print "running block -> ", i + j

            try:
                corr_matrix = np.nan_to_num(
                    calc_corrcoef(timeseries[j:i].T, timeseries.T))
            except:
                raise Exception(
                    "Error in calcuating block wise correlation for the block %,%"
                    % (j, i))

            if r_value is None:
                r_value = calc_threshold(1,
                                         threshold,
                                         scans,
                                         corr_matrix,
                                         full_matrix=False)

            if method_options[1]:
                r_matrix[j:i] = corr_matrix

            if method_options[0]:
                if weight_options[0]:
                    degree_mat_binarize[j:i] = np.sum(
                        (corr_matrix > r_value).astype(np.float32), axis=1) - 1
                if weight_options[1]:
                    degree_mat_weighted[j:i] = np.sum(
                        corr_matrix *
                        (corr_matrix > r_value).astype(np.float32),
                        axis=1) - 1

            j = i
            if i == timeseries.shape[0]:
                break
            elif (i + block_size) > timeseries.shape[0]:
                i = timeseries.shape[0]
            else:
                i += block_size

        try:
            if method_options[1]:
                out_list.extend(calc_eigenV(r_matrix, r_value, weight_options))
        except Exception:
            print "Error in calcuating eigen vector centrality"
            raise

        return out_list

    except Exception:
        print "Error in calcuating Centrality"
        raise
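

# A minimal usage sketch, not part of the original module: the input path,
# the memory budget format, and the numeric values are hypothetical; passing
# r_value explicitly skips the call to calc_threshold.
def _example_get_centrality_opt():
    outputs = get_centrality_opt('subject_timeseries.npy',
                                 method_options=[True, False],  # degree only, no eigenvector
                                 weight_options=[True, True],   # binarized and weighted maps
                                 memory_allocated='2',          # memory for block-wise computation (format assumed)
                                 threshold=0.001,               # unused here because r_value is given
                                 scans=120,
                                 r_value=0.6)                   # fixed correlation threshold
    for name, matrix in outputs:
        print(name, matrix.shape)

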
def get_centrality(timeseries_data, method_options, weight_options, threshold,
                   option, scans, memory_allocated):
    """
    Method to calculate degree and eigenvector centrality
    
    Parameters
    ----------
    timeseries_data : string (numpy npy file)
        path to file containing the timeseries of the input subject
    method_options : list of two booleans
        list of two booleans for the degree and eigenvector centrality options
        respectively
    weight_options : list of two booleans
        list of two booleans for the binarize and weighted options respectively
    threshold : float
        threshold value passed to calc_threshold
    option : integer
        threshold option passed to calc_threshold
    scans : integer
        number of scans in the subject
    memory_allocated : string
        amount of memory allocated to degree centrality
    
    Returns
    -------
    out_list : list of tuples
        list of tuples, each containing the output name used to store the
        nifti image for the centrality and the corresponding centrality matrix
    
    Raises
    ------
    Exception
    """

    import numpy as np
    from CPAC.network_centrality import load_mat,\
                                        calc_corrcoef,\
                                        calc_blocksize,\
                                        calc_threshold,\
                                        calc_eigenV

    out_list = []

    try:

        timeseries = load_mat(timeseries_data)
        shape = timeseries.shape
        block_size = calc_blocksize(shape, memory_allocated)
        corr_matrix = np.zeros((shape[0], shape[0]), dtype=np.float16)
        j = 0
        i = block_size

        while i <= timeseries.shape[0]:
            print "block -> ", i + j
            temp_matrix = np.nan_to_num(
                calc_corrcoef(timeseries[j:i].T, timeseries.T))
            corr_matrix[j:i] = temp_matrix
            j = i
            if i == timeseries.shape[0]:
                break
            elif (i + block_size) > timeseries.shape[0]:
                i = timeseries.shape[0]
            else:
                i += block_size

        r_value = calc_threshold(option,
                                 threshold,
                                 scans,
                                 corr_matrix,
                                 full_matrix=True)

        print "r_value -> ", r_value

        if method_options[0]:

            print "calculating binarize degree centrality matrix..."
            degree_matrix = np.sum(corr_matrix > r_value, axis=1) - 1
            out_list.append(('degree_centrality_binarize', degree_matrix))

            print "calculating weighted degree centrality matrix..."
            degree_matrix = np.sum(corr_matrix * (corr_matrix > r_value),
                                   axis=1) - 1
            out_list.append(('degree_centrality_weighted', degree_matrix))

        if method_options[1]:
            out_list.extend(calc_eigenV(corr_matrix, r_value, weight_options))

    except Exception:
        print "Error while calculating centrality"
        raise

    return out_list
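

# A minimal usage sketch, not part of the original module: the input path and
# the numeric values are hypothetical; threshold and option are simply
# forwarded to calc_threshold.
def _example_get_centrality():
    outputs = get_centrality('subject_timeseries.npy',
                             method_options=[True, True],  # degree and eigenvector centrality
                             weight_options=[True, True],  # binarized and weighted variants
                             threshold=0.001,              # interpreted by calc_threshold
                             option=0,                     # threshold option for calc_threshold
                             scans=120,
                             memory_allocated='2')         # memory for block-wise computation (format assumed)
    for name, _matrix in outputs:
        print(name)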