Example #1
def apply_parcellation(subject):
    '''
    Applies parcellation for all sparsity levels

    Parameters
    ----------
    subject : int
        subject id
    '''
    average_desikan = np.load(
        '/data01/ayagoz/sparse_32_concon_HCP/parcellations/desikan_aparc_average_1113.npy',
        allow_pickle=True)
    source = '/data01/ayagoz/sparse_32_concon_HCP/connectomes/ConCon_resolution'
    parcellation = '/data01/ayagoz/sparse_32_concon_HCP/parcellations/connectivity_parcellation_level'
    target_folder = '/data01/ayagoz/sparse_32_concon_HCP/connectomes/individual_louvain_parcellation'

    for sparsity in range(10, 101, 10):
        try:
            adj = load_concon(f'{source}/{sparsity}/{subject}.npz',
                              labels_to_drop=average_desikan)
            for i in [1, 2, 3]:
                labels_parcellation = np.load(
                    f'{parcellation}{i}/{sparsity}/{subject}.npy')
                adj_parcellation = squeeze_matrix(adj, labels_parcellation)
                np.save(f'{target_folder}/level{i}/{sparsity}/{subject}.npy',
                        adj_parcellation)
        except BaseException as e:
            print(e, subject, sparsity)
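A minimal driver sketch for the example above, assuming apply_parcellation from this example is in scope and that joblib is installed; the subject ids are taken from the file names of one sparsity folder, and n_jobs is an arbitrary choice.

from glob import glob
from pathlib import Path

from joblib import Parallel, delayed

source = '/data01/ayagoz/sparse_32_concon_HCP/connectomes/ConCon_resolution'
# subject ids are the file stems of one sparsity folder
subjects = [Path(p).stem for p in glob(f'{source}/10/*.npz')]
# process one subject per worker
Parallel(n_jobs=8)(delayed(apply_parcellation)(s) for s in subjects)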
Example #2
def apply_parcellation(subject, parcellation_folder, source_folder,
                       target_folder):
    '''
    Applies parcellation for all sparsity levels

    Parameters
    ----------
    subject : int
        subject id
    parcellation_folder : str
        folder with labels to apply
    source_folder : str
        folder with concon sparsity folders
    target_folder : str
        folder to save to
    '''
    for sparsity in range(10, 101, 10):
        try:
            adj = load_concon(f'{source_folder}/{sparsity}/{subject}.npz')
            labels_parcellation = np.load(
                f'{parcellation_folder}/{subject}.npy')
            adj_parcellation = squeeze_matrix(adj,
                                              labels_parcellation,
                                              drop_minus_1=True)
            np.save(f'{target_folder}/{sparsity}/{subject}.npy',
                    adj_parcellation)
        except BaseException as e:
            print(e, subject)
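A hypothetical invocation of the generalized function above; the subject id and folder paths are placeholders, not taken from the original project.

apply_parcellation(
    subject=100307,  # placeholder subject id
    parcellation_folder='/path/to/parcellations/individual_level1',
    source_folder='/path/to/connectomes/ConCon_resolution',
    target_folder='/path/to/connectomes/individual_parcellation/level1',
)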
Example #3
def get_partitions_at_levels(subject_id, labels_to_drop):
    '''
    Computes partitions at three hierarchical levels for one subject
    and saves them for every sparsity level.
    '''

    for sparsity in tqdm(range(10, 101, 10)):
        try:
            path = f'/data01/ayagoz/sparse_32_concon_HCP/ConCon_resolution/{sparsity}/{subject_id}.npz'
            adj = load_concon(path, labels_to_drop=labels_to_drop)
            partition_level_1 = compute_partition(adj)
            partition_level_2 = cluster_subgraphs(adj, partition_level_1, 5,
                                                  200)
            partition_level_3 = cluster_subgraphs(adj, partition_level_2, 5,
                                                  360)
            parcellation_path = '/data01/ayagoz/sparse_32_concon_HCP/parcellations'
            np.save(
                f'{parcellation_path}/connectivity_parcellation_level1/{sparsity}/{subject_id}.npy',
                partition_level_1)
            np.save(
                f'{parcellation_path}/connectivity_parcellation_level2/{sparsity}/{subject_id}.npy',
                partition_level_2)
            np.save(
                f'{parcellation_path}/connectivity_parcellation_level3/{sparsity}/{subject_id}.npy',
                partition_level_3)
        except BaseException as e:
            print(e, subject_id)
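compute_partition and cluster_subgraphs are project helpers that are not shown in these examples. A minimal sketch of what the first-level step could look like, assuming a dense adjacency matrix and the networkx and python-louvain packages:

import numpy as np
import networkx as nx
import community as community_louvain  # python-louvain package

def compute_partition_sketch(adj):
    # build a weighted graph from the adjacency matrix and run Louvain
    graph = nx.from_numpy_array(np.asarray(adj))
    partition = community_louvain.best_partition(graph, weight='weight')
    # one label per node, ordered by node index, as the np.save calls above expect
    return np.array([partition[node] for node in range(graph.number_of_nodes())])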
Example #4
def apply_parcellation(subject, parcellation_folder, source_folder,
                       target_folder):
    '''
    Applies parcellation for all sparsity levels

    Parameters
    ----------
    subject : int
        subject id
    parcellation_folder : str
        folder with labels to apply
    source_folder : str
        folder with concon sparsity folders
    target_folder : str
        folder to save to
    '''
    # parcellation level (1, 2 or 3), taken from the last character of the folder name
    plevel = parcellation_folder[-1]
    average_desikan = np.load(
        '/data01/ayagoz/sparse_32_concon_HCP/parcellations/desikan_aparc_average_1113.npy',
        allow_pickle=True)

    for sparsity in range(10, 101, 10):
        try:
            adj = load_concon(f'{source_folder}/{sparsity}/{subject}.npz',
                              labels_to_drop=average_desikan)
            labels_parcellation = np.load(
                f'{parcellation_folder}/{sparsity}/ensemble_{plevel}_{sparsity}.npy'
            )
            adj_parcellation = squeeze_matrix(adj, labels_parcellation)
            np.save(f'{target_folder}/{sparsity}/{subject}.npy',
                    adj_parcellation)
        except BaseException as e:
            print(e, subject, sparsity, plevel)
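Since plevel is parsed from the trailing character of parcellation_folder, the level folders are assumed to end in 1, 2 or 3. A hypothetical driver loop over the three levels (the paths are placeholders):

base = '/path/to/parcellations/ensemble_level'        # placeholder
source = '/path/to/connectomes/ConCon_resolution'     # placeholder
target = '/path/to/connectomes/ensemble_parcellation' # placeholder

for level in (1, 2, 3):
    # the trailing digit of the parcellation folder selects the ensemble file
    apply_parcellation(100307, f'{base}{level}', source, f'{target}/level{level}')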
Example #5
def apply_parcellation(subject):
    '''
    Applies parcellation for all sparsity levels

    Parameters
    ----------
    subject : int
        subject id
    '''
    # plevel (parcellation level 1, 2 or 3) is expected to be defined in the
    # enclosing scope
    concon_folder = '/data01/ayagoz/sparse_32_concon_HCP/connectomes/ConCon_resolution'
    aver_path = f'/data01/ayagoz/sparse_32_concon_HCP/parcellations/ensemble_parcellation/average_network_partition/level{plevel}'
    cspa_path = f'/data01/ayagoz/sparse_32_concon_HCP/parcellations/ensemble_parcellation/CSPA/level{plevel}'
    aver_target = f'/data01/ayagoz/sparse_32_concon_HCP/connectomes/Ensemble_parcellation/Aver_level{plevel}'
    cspa_target = f'/data01/ayagoz/sparse_32_concon_HCP/connectomes/Ensemble_parcellation/CSPA_level{plevel}'
    average_desikan = np.load(
        '/data01/ayagoz/sparse_32_concon_HCP/parcellations/desikan_aparc_average_1113.npy',
        allow_pickle=True)

    for sparsity in range(10, 101, 10):
        try:
            adj = load_concon(f'{concon_folder}/{sparsity}/{subject}.npz',
                              labels_to_drop=average_desikan)
            labels_parcellation = np.load(
                f'{aver_path}/aver_level{plevel}_{sparsity}.npy')
            adj_parcellation = squeeze_matrix(adj, labels_parcellation)
            np.save(f'{aver_target}/{sparsity}/{subject}.npy',
                    adj_parcellation)

            labels_parcellation = np.load(
                f'{cspa_path}/CSPA_{plevel}_{sparsity}.npy')
            adj_parcellation = squeeze_matrix(adj, labels_parcellation)
            np.save(f'{cspa_target}/{sparsity}/{subject}.npy',
                    adj_parcellation)
        except BaseException as e:
            print(e, subject, sparsity, plevel)
Example #6
if __name__ == "__main__":

    random_state = int(argv[1])
    for r in tqdm(range(random_state, random_state + 10)):
        # seed with r so each repetition draws a different random subset of subjects
        random = np.random.RandomState(r)
        sparsity = 10
        average_desikan = np.load(
            '/data01/ayagoz/sparse_32_concon_HCP/parcellations/desikan_aparc_average_1113.npy',
            allow_pickle=True)

        paths = glob(
            f'/data01/ayagoz/sparse_32_concon_HCP/connectomes/ConCon_resolution/{sparsity}/*.npz'
        )
        random.shuffle(paths)
        adj = load_concon(paths[0], labels_to_drop=average_desikan)

        for file in tqdm(paths[1:50]):
            adj_temp = load_concon(file, labels_to_drop=average_desikan)
            adj += adj_temp

        partition_level_1 = compute_partition(adj)
        partition_level_2 = cluster_subgraphs(adj, partition_level_1, 5, 200)
        partition_level_3 = cluster_subgraphs(adj, partition_level_2, 5, 360)

        np.save(
            f'/home/kurmukov/subject_stability/aver_50_level1_{sparsity}_{r}.npy',
            partition_level_1)
        np.save(
            f'/home/kurmukov/subject_stability/aver_50_level2_{sparsity}_{r}.npy',
            partition_level_2)