Example #1
def new_test_group_stability_matrix():
    """
    Tests group_stability_matrix method.  This creates a dataset of blobs varying only by additive zero-mean gaussian
    noise and calculates the group stability matrix.
    """

    import numpy as np
    import utils
    import basc

    bootstrap = 20
    blobs = generate_blobs()

    ism_dataset = np.zeros((5, blobs.shape[0], blobs.shape[0]))

    indiv_stability_list = []

    for i in range(ism_dataset.shape[0]):
        ism_dataset[i] = utils.individual_stability_matrix(
            blobs + 0.2 * np.random.randn(blobs.shape[0], blobs.shape[1]),
            bootstrap,
            3,
            similarity_metric='correlation',
            affinity_threshold=0.0)
        f = 'ism_dataset_%i.npy' % i
        indiv_stability_list.append(f)
        np.save(f, ism_dataset[i])

    n_bootstraps = 10
    n_clusters = 3

    G = basc.map_group_stability(indiv_stability_list, n_bootstraps,
                                 n_clusters)

    return G
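
# generate_blobs() is used by all of these tests but is not shown in the
# source. A minimal sketch of what such a helper might look like, assuming it
# builds three Gaussian blobs with scikit-learn's make_blobs (the shapes and
# parameters here are illustrative assumptions, not PyBASC's actual helper):

def generate_blobs():
    from sklearn.datasets import make_blobs
    # rows are observations (e.g. voxels), columns are features (e.g. timepoints)
    x, _ = make_blobs(n_samples=90, n_features=30, centers=3,
                      cluster_std=1.0, random_state=27)
    return x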
Example #2
def test_individual_stability_matrix():
    """
    Tests the individual_stability_matrix method on three Gaussian blobs.
    """
    import numpy as np
    import utils

    desired = np.load(home + '/git_repo/PyBASC/tests/ism_test.npy')
    blobs = generate_blobs()
    ism = utils.individual_stability_matrix(blobs,
                                            20,
                                            3,
                                            similarity_metric='correlation')
    # The bootstrap is stochastic, so exact equality with the stored fixture
    # cannot be expected; require a strong correlation instead.
    corr = np.corrcoef(ism.flatten(), desired.flatten())[0, 1]
    assert corr > 0.9
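
# If the ism_test.npy fixture is absent, something like the following could
# regenerate it. This is a sketch under the assumption that the fixture was
# produced by the identical individual_stability_matrix call; it is not part
# of the original test suite:

def regenerate_ism_fixture(path='ism_test.npy'):
    import numpy as np
    import utils
    blobs = generate_blobs()
    ism = utils.individual_stability_matrix(blobs, 20, 3,
                                            similarity_metric='correlation')
    np.save(path, ism)
    return path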
Example #3
def test_compare_stability_matrices():

    import numpy as np
    import utils
    import basc

    bootstrap = 20
    blobs = generate_blobs()
    n_bootstraps = 10
    n_clusters = 5
    subjects = 20

    ism_dataset = np.zeros((subjects, blobs.shape[0], blobs.shape[0]))
    ism_list = []
    for i in range(ism_dataset.shape[0]):
        ism_dataset[i] = utils.individual_stability_matrix(
            blobs + 0.2 * np.random.randn(blobs.shape[0], blobs.shape[1]),
            n_bootstraps,
            n_clusters,
            affinity_threshold=0.0)
        f = 'ism_dataset_%i.npy' % i
        ism_list.append(f)
        np.save(f, ism_dataset[i])

    G = basc.map_group_stability(ism_list, n_bootstraps, n_clusters)

    gsm = np.load(G)
    gsm = gsm.astype("float64")

    corr = np.zeros((subjects, 1))
    for j in range(ism_dataset.shape[0]):
        ism = ism_dataset[j].astype("float64")
        corr[j] = utils.compare_stability_matrices(gsm, ism)

    # Summary statistics of the group-vs-individual comparisons across subjects.
    meandist5 = corr.mean()
    vardist5 = corr.var()
    sumdist5 = corr.cumsum()
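
# compare_stability_matrices is assumed here to return a scalar similarity
# between the group and an individual stability matrix. A plausible sketch of
# that comparison (Pearson correlation of the flattened matrices; an
# illustration, not PyBASC's actual implementation):

def compare_stability_matrices_sketch(gsm, ism):
    import numpy as np
    gsm = np.asarray(gsm, dtype='float64').ravel()
    ism = np.asarray(ism, dtype='float64').ravel()
    # 1.0 means the two matrices share identical cluster structure
    return np.corrcoef(gsm, ism)[0, 1]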
Example #4
def test_cluster_matrix_average():

    import utils
    import matplotlib.pyplot as plt

    roi_mask_nparray = 'empty'  # sentinel value: no ROI mask in this test
    blobs = generate_blobs()
    n_clusters = 3
    similarity_metric = 'correlation'
    ism = utils.individual_stability_matrix(blobs, 100, n_clusters,
                                            similarity_metric)

    y_predict = utils.cluster_timeseries(blobs,
                                         roi_mask_nparray,
                                         n_clusters,
                                         similarity_metric,
                                         affinity_threshold=0.0,
                                         neighbors=10)
    cluster_voxel_scores, K_mask = utils.cluster_matrix_average(ism, y_predict)

    plt.imshow(K_mask)
    plt.show()
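
# cluster_matrix_average is assumed to score each voxel by how stably it sits
# in its assigned cluster. A minimal sketch of that idea (per-voxel mean
# stability with the other members of its cluster; illustrative only, not the
# library's code):

def cluster_voxel_scores_sketch(ism, y_predict):
    import numpy as np
    y_predict = np.asarray(y_predict).ravel()
    scores = np.zeros(ism.shape[0])
    for k in np.unique(y_predict):
        members = np.where(y_predict == k)[0]
        # average stability of each member with the rest of its cluster
        scores[members] = ism[np.ix_(members, members)].mean(axis=1)
    return scores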
Example #5
def test_data_compress_expand():

    import os
    import numpy as np
    import nibabel as nb
    import utils
    import pandas as pd
    import sklearn as sk
    from sklearn import preprocessing  # required so sk.preprocessing resolves

    # Setup
    subject_file = home + '/git_repo/PyBASC/sample_data/sub1/Func_Quarter_Res.nii.gz'
    roi_mask_file = home + '/git_repo/PyBASC/masks/LC_Quarter_Res.nii.gz'
    roi2_mask_file = home + '/git_repo/PyBASC/masks/RC_Quarter_Res.nii.gz'
    n_bootstraps = 100
    n_clusters = 10
    output_size = 20
    cross_cluster = True
    cbb_block_size = None
    affinity_threshold = 0.5

    print('Calculating individual stability matrix of:', subject_file)

    data = nb.load(subject_file).get_fdata().astype('float32')
    print('Data Loaded')

    if roi2_mask_file is not None:
        print('Setting up NIS')
        roi_mask_file_nb = nb.load(roi_mask_file)
        roi2_mask_file_nb = nb.load(roi2_mask_file)

        roi_mask_nparray = roi_mask_file_nb.get_fdata().astype('bool')
        roi2_mask_nparray = roi2_mask_file_nb.get_fdata().astype('bool')

        roi1data = data[roi_mask_nparray]
        roi2data = data[roi2_mask_nparray]

        # Normalize each timeseries to unit L2 norm (the original TODO asked
        # for mean/std scaling; L2 normalization is what is applied here).
        roi1data = sk.preprocessing.normalize(roi1data, norm='l2')
        roi2data = sk.preprocessing.normalize(roi2data, norm='l2')

        print('Compressing data')
        data_dict1 = utils.data_compression(roi1data.T, roi_mask_file_nb,
                                            roi_mask_nparray, output_size)
        Y1_compressed = data_dict1['data']
        Y1_compressed = Y1_compressed.T
        Y1_labels = pd.DataFrame(data_dict1['labels'])
        Y1_labels = np.array(Y1_labels)
        print('Y1 compressed')

        print('Compressing Y2')

        data_dict2 = utils.data_compression(roi2data.T, roi2_mask_file_nb,
                                            roi2_mask_nparray, output_size)
        Y2_compressed = data_dict2['data']
        Y2_compressed = Y2_compressed.T
        Y2_labels = pd.DataFrame(data_dict2['labels'])
        print('Y2 compressed')

        print('Going into ism')
        ism = utils.individual_stability_matrix(Y1_compressed, n_bootstraps,
                                                n_clusters, Y2_compressed,
                                                cross_cluster, cbb_block_size,
                                                affinity_threshold)
        # Division by n_bootstraps is already handled inside individual_stability_matrix.

        print('Expanding ism')
        voxel_ism = utils.expand_ism(ism, Y1_labels)

        # The *100 scaling is already applied inside individual_stability_matrix.
        voxel_ism = voxel_ism.astype("uint8")
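
# expand_ism maps the compressed (supervoxel-level) stability matrix back to
# voxel space using the compression labels. A sketch of that mapping, assuming
# labels[v] gives the supervoxel index of voxel v (an illustration, not the
# library's implementation):

def expand_ism_sketch(ism, labels):
    import numpy as np
    labels = np.asarray(labels).ravel()
    # every voxel pair inherits the stability of its supervoxel pair
    return ism[np.ix_(labels, labels)]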
Example #6
def nifti_individual_stability(subject_file,
                               roi_mask_file,
                               n_bootstraps,
                               n_clusters,
                               output_size,
                               similarity_metric,
                               cross_cluster=False,
                               roi2_mask_file=None,
                               blocklength=1,
                               cbb_block_size=None,
                               affinity_threshold=0.5):
    """
    Calculate the individual stability matrix for a single subject by using Circular Block Bootstrapping method
    for time-series data.

    Parameters
    ----------
    subject_file : string
        Nifti file of a subject
    roi_mask_file : string
        Region of interest (this method is too computationally intensive to perform on a whole-brain volume)
    n_bootstraps : integer
        Number of bootstraps
    n_clusters : integer
        Number of clusters
    cbb_block_size : integer, optional
        Size of the time-series block when performing circular block bootstrap
    affinity_threshold : float, optional
        Minimum threshold for similarity matrix based on correlation to create an edge

    Returns
    -------
    ism : array_like
        Individual stability matrix of shape (`V`, `V`), `V` voxels
    """

    import os
    import numpy as np
    import nibabel as nb
    import utils
    import pandas as pd
    import sklearn as sk
    from sklearn import preprocessing

    print('Calculating individual stability matrix of:', subject_file)

    data = nb.load(subject_file).get_fdata().astype('float32')
    roi_mask_file_nb = nb.load(roi_mask_file)
    roi_mask_nparray = roi_mask_file_nb.get_fdata().astype('bool')

    roi1data = data[roi_mask_nparray]
    roi1data = sk.preprocessing.normalize(roi1data, norm='l2')
    data_dict1 = utils.data_compression(roi1data.T, roi_mask_file_nb,
                                        roi_mask_nparray, output_size)
    Y1_compressed = data_dict1['data']
    Y1_labels = pd.DataFrame(data_dict1['labels'])
    Y1_labels = np.array(Y1_labels)

    if roi2_mask_file is not None:

        roi2_mask_file_nb = nb.load(roi2_mask_file)
        roi2_mask_nparray = roi2_mask_file_nb.get_fdata().astype('bool')
        roi2data = data[roi2_mask_nparray]
        roi2data = sk.preprocessing.normalize(roi2data, norm='l2')
        # The second ROI is compressed to a slightly larger output size.
        output_size2 = output_size + 5
        data_dict2 = utils.data_compression(roi2data.T, roi2_mask_file_nb,
                                            roi2_mask_nparray, output_size2)

        Y2_compressed = data_dict2['data']
        Y2_labels = pd.DataFrame(data_dict2['labels'])
        Y2_labels = np.array(Y2_labels)

        print('Going into ism')
        ism = utils.individual_stability_matrix(Y1_compressed,
                                                roi_mask_nparray, n_bootstraps,
                                                n_clusters, similarity_metric,
                                                Y2_compressed, cross_cluster,
                                                cbb_block_size, blocklength,
                                                affinity_threshold)
        # Division by n_bootstraps is already handled inside individual_stability_matrix.

        print('Expanding ism')
        voxel_ism = utils.expand_ism(ism, Y1_labels)

        # The *100 scaling is already applied inside individual_stability_matrix.
        voxel_ism = voxel_ism.astype("uint8")

        ism_file = os.path.join(os.getcwd(), 'individual_stability_matrix.npy')
        np.save(ism_file, voxel_ism)
        return ism_file

    else:

        # No second ROI: compute the stability matrix on the first ROI alone.
        Y2 = None
        ism = utils.individual_stability_matrix(Y1_compressed,
                                                roi_mask_nparray, n_bootstraps,
                                                n_clusters, similarity_metric,
                                                Y2, cross_cluster,
                                                cbb_block_size, blocklength,
                                                affinity_threshold)

        # Division by n_bootstraps is already handled inside individual_stability_matrix.

        print('Expanding ism')
        voxel_ism = utils.expand_ism(ism, Y1_labels)
        # The *100 scaling is already applied inside individual_stability_matrix.
        voxel_ism = voxel_ism.astype("uint8")

        ism_file = os.path.join(os.getcwd(), 'individual_stability_matrix.npy')
        np.save(ism_file, voxel_ism)

        return ism_file
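
# A hedged usage sketch for nifti_individual_stability. The file paths below
# are placeholders; only the parameters defined in the signature above are used:

if __name__ == '__main__':
    ism_file = nifti_individual_stability(
        subject_file='sub1_func.nii.gz',   # placeholder path
        roi_mask_file='roi_mask.nii.gz',   # placeholder path
        n_bootstraps=100,
        n_clusters=10,
        output_size=20,
        similarity_metric='correlation')
    print('Individual stability matrix written to', ism_file)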