from glob import glob

import numpy as np
from tqdm import tqdm

# Project-specific helpers (load_concon, compute_partition, compute_agreements,
# cluster_subgraphs) are assumed to be defined or imported elsewhere in the project.


def get_partitions(sparsity, level):
    """Build a CSPA consensus parcellation from a precomputed agreement graph."""
    try:
        path = (f'/data01/ayagoz/sparse_32_concon_HCP/connectomes/agreement_graphs/'
                f'level{level}/agreement_adj_{level}_{sparsity}.npy')
        adj = np.load(path)
        partition = compute_partition(adj)
        parcellation_path = (f'/data01/ayagoz/sparse_32_concon_HCP/parcellations/'
                             f'ensemble_parcellation/CSPA/level{level}/CSPA_{level}_{sparsity}.npy')
        np.save(parcellation_path, partition)
    except BaseException as e:
        print(e)
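# A driver sketch for the consensus step above (an assumption, not part of the
# original script): the 10..100 sparsity grid mirrors the loops below, and
# levels 1-3 are inferred from the level1/level2/level3 output directories.
def run_consensus_grid(levels=(1, 2, 3), sparsities=range(10, 101, 10)):
    for level in levels:
        for sparsity in sparsities:
            get_partitions(sparsity, level)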
def do_job(r=None):
    """Build a CSPA consensus over a random subsample of 50 subject parcellations
    (level 3, sparsity 10), with the subsample seeded by ``r``."""
    level = 3
    sparsity = 10
    p = '/data01/ayagoz/sparse_32_concon_HCP/'
    random = np.random.RandomState(r)
    files = glob(f'{p}parcellations/connectivity_parcellation_level{level}/{sparsity}/*.npy')
    random.shuffle(files)

    # Collect the individual labelings of the first 50 shuffled subjects.
    partitions = []
    for file in files[:50]:
        labels = np.load(file)
        partitions.append(labels)

    # Consensus: build the agreement graph over the stacked labelings, then partition it.
    adj = compute_agreements(np.array(partitions))
    labels = compute_partition(adj)
    np.save(f'/home/kurmukov/subject_stability/CSPA/CSPA_3_10_{r}.npy', labels)
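# A parallel driver sketch (an assumption, not part of the original script):
# each seed yields an independent 50-subject consensus, so the runs can be
# dispatched to separate processes with the standard library.
from multiprocessing import Pool


def run_subject_stability(seeds=range(10), processes=4):
    with Pool(processes) as pool:
        pool.map(do_job, list(seeds))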
def get_partitions_at_levels(subject_id, labels_to_drop):
    """Compute hierarchical (level 1-3) connectivity parcellations for one subject
    across all sparsity thresholds."""
    for sparsity in tqdm(range(10, 101, 10)):
        try:
            path = f'/data01/ayagoz/sparse_32_concon_HCP/ConCon_resolution/{sparsity}/{subject_id}.npz'
            adj = load_concon(path, labels_to_drop=labels_to_drop)

            # Level 1 partitions the whole graph; levels 2 and 3 refine it by
            # clustering the subgraphs induced by the previous level.
            partition_level_1 = compute_partition(adj)
            partition_level_2 = cluster_subgraphs(adj, partition_level_1, 5, 200)
            partition_level_3 = cluster_subgraphs(adj, partition_level_2, 5, 360)

            parcellation_path = '/data01/ayagoz/sparse_32_concon_HCP/parcellations'
            np.save(f'{parcellation_path}/connectivity_parcellation_level1/{sparsity}/{subject_id}.npy',
                    partition_level_1)
            np.save(f'{parcellation_path}/connectivity_parcellation_level2/{sparsity}/{subject_id}.npy',
                    partition_level_2)
            np.save(f'{parcellation_path}/connectivity_parcellation_level3/{sparsity}/{subject_id}.npy',
                    partition_level_3)
        except BaseException as e:
            print(e, subject_id)
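# A per-subject driver sketch (an assumption, not part of the original script):
# subject ids are taken from the .npz filenames at one sparsity level, and
# ``labels_to_drop`` would typically be the average Desikan labels loaded below.
from pathlib import Path


def run_all_subjects(labels_to_drop, sparsity=10):
    subject_dir = Path(f'/data01/ayagoz/sparse_32_concon_HCP/ConCon_resolution/{sparsity}')
    for npz_file in sorted(subject_dir.glob('*.npz')):
        get_partitions_at_levels(npz_file.stem, labels_to_drop)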
# Consensus from averaged connectomes: for each of ten seeds, sum 50 randomly
# chosen ConCon adjacency matrices and parcellate the aggregate hierarchically.
# ``random_state`` is expected to be defined earlier (e.g., passed to the script).
for r in tqdm(range(random_state, random_state + 10)):
    # Seed with the loop variable so every run draws a different 50-subject sample.
    random = np.random.RandomState(r)
    sparsity = 10
    average_desikan = np.load(
        '/data01/ayagoz/sparse_32_concon_HCP/parcellations/desikan_aparc_average_1113.npy',
        allow_pickle=True)
    paths = glob(f'/data01/ayagoz/sparse_32_concon_HCP/connectomes/ConCon_resolution/{sparsity}/*.npz')
    random.shuffle(paths)

    # Accumulate 50 subject adjacency matrices (paths[0] plus the next 49).
    adj = load_concon(paths[0], labels_to_drop=average_desikan)
    for file in tqdm(paths[1:50]):
        adj_temp = load_concon(file, labels_to_drop=average_desikan)
        adj += adj_temp

    partition_level_1 = compute_partition(adj)
    partition_level_2 = cluster_subgraphs(adj, partition_level_1, 5, 200)
    partition_level_3 = cluster_subgraphs(adj, partition_level_2, 5, 360)

    np.save(f'/home/kurmukov/subject_stability/aver_50_level1_{sparsity}_{r}.npy',
            partition_level_1)
    np.save(f'/home/kurmukov/subject_stability/aver_50_level2_{sparsity}_{r}.npy',
            partition_level_2)
    np.save(f'/home/kurmukov/subject_stability/aver_50_level3_{sparsity}_{r}.npy',
            partition_level_3)
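# A comparison sketch (an assumption, not part of the original script): quantify
# how similar the per-seed parcellations are via the mean pairwise adjusted Rand
# index from scikit-learn.
from itertools import combinations

from sklearn.metrics import adjusted_rand_score


def mean_pairwise_ari(label_files):
    partitions = [np.load(f) for f in label_files]
    scores = [adjusted_rand_score(a, b) for a, b in combinations(partitions, 2)]
    return float(np.mean(scores))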