def thresh_and_fit(dens_thresh, thr, ts_within_nodes, conn_model, network, ID, dir_path, mask, node_size):
    import numpy as np
    from pynets import utils, thresholding, graphestimation

    thr_perc = 100 * float(thr)
    edge_threshold = "%s%s" % (str(thr_perc), '%')
    if not dens_thresh:
        print("%s%.2f%s" % ('\nThresholding proportionally at: ', thr_perc, '% ...\n'))
    else:
        print("%s%.2f%s" % ('\nThresholding to achieve density of: ', thr_perc, '% ...\n'))

    # Fit the connectivity model
    conn_matrix = graphestimation.get_conn_matrix(ts_within_nodes, conn_model)

    # Save the unthresholded matrix
    unthr_path = utils.create_unthr_path(ID, network, conn_model, mask, dir_path)
    np.save(unthr_path, conn_matrix)

    # Threshold the matrix, either proportionally or to a target density
    if dens_thresh is False:
        conn_matrix_thr = thresholding.threshold_proportional(conn_matrix, float(thr))
    else:
        conn_matrix_thr = thresholding.density_thresholding(conn_matrix, float(thr))

    # Save the thresholded matrix
    est_path = utils.create_est_path(ID, network, conn_model, thr, mask, dir_path, node_size)
    np.save(est_path, conn_matrix_thr)

    return conn_matrix_thr, edge_threshold, est_path, thr, node_size, network
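# Minimal sketch (not the bctpy/PyNets implementation) of proportional thresholding as used in
# the dens_thresh is False branch above: retain the strongest fraction p of off-diagonal weights
# and zero the rest. Assumes an undirected (symmetric) weighted matrix; the function name is
# illustrative only.
import numpy as np


def proportional_threshold_sketch(W, p):
    W = np.array(W, dtype=float)
    np.fill_diagonal(W, 0)
    iu = np.triu_indices_from(W, k=1)      # upper-triangle edge list
    weights = W[iu]
    k = int(round(p * weights.size))       # number of edges to keep
    keep = np.argsort(np.abs(weights))[::-1][:k]
    mask = np.zeros(weights.size, dtype=bool)
    mask[keep] = True
    out = np.zeros_like(W)
    out[iu] = np.where(mask, weights, 0)
    return out + out.T                      # re-symmetrize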
def density_thresholding(ts_within_spheres, conn_model, NETWORK, ID, dens_thresh, dir_path):
    thr = 0.0
    [conn_matrix, est_path] = graphestimation.get_conn_matrix(ts_within_spheres, conn_model, NETWORK, ID,
                                                              dir_path, thr)
    conn_matrix = normalize(conn_matrix)
    np.fill_diagonal(conn_matrix, 0)
    i = 1
    thr_max = 0.40
    G = nx.from_numpy_matrix(conn_matrix)
    density = nx.density(G)
    # Raise the absolute threshold in 0.01 steps until the target density is reached
    # (or thr_max is exceeded)
    while float(thr) <= float(thr_max) and float(density) > float(dens_thresh):
        thr = float(thr) + float(0.01)
        conn_matrix = threshold_absolute(conn_matrix, thr)
        G = nx.from_numpy_matrix(conn_matrix)
        density = nx.density(G)
        print('Iteratively thresholding -- Iteration ' + str(i) + ' -- with absolute thresh: ' + str(thr) +
              ' and Density: ' + str(density) + '...')
        i = i + 1
    edge_threshold = str(float(thr) * 100) + '%'
    est_path2 = est_path.split('_0.')[0] + '_' + str(dens_thresh) + '.txt'
    os.rename(est_path, est_path2)
    return conn_matrix, est_path2, edge_threshold, dens_thresh
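# Minimal, self-contained sketch (not PyNets code) of the iterative density-thresholding idea
# used in density_thresholding above: raise an absolute threshold in small steps until the graph
# density of a symmetric weight matrix falls to a target value. Names here
# (illustrate_density_thresholding, dens_target) are illustrative only; the function above caps
# the search at thr_max = 0.40, whereas this sketch lets it run to 1.0.
import numpy as np
import networkx as nx


def illustrate_density_thresholding(n=50, dens_target=0.10, thr_max=1.0, step=0.01, seed=0):
    rng = np.random.default_rng(seed)
    W = rng.random((n, n))
    W = (W + W.T) / 2.0             # symmetrize
    np.fill_diagonal(W, 0)          # zero the diagonal, as in the function above
    thr = 0.0
    density = nx.density(nx.from_numpy_array(W))
    while thr <= thr_max and density > dens_target:
        thr += step
        W[np.abs(W) < thr] = 0      # absolute thresholding
        density = nx.density(nx.from_numpy_array(W))
    return W, thr, density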
def test_get_conn_matrix_cov():
    base_dir = str(Path(__file__).parent / "examples")
    dir_path = base_dir + '/997'
    time_series_file = dir_path + '/coords_power_2011/997_wb_net_ts.txt'
    time_series = np.genfromtxt(time_series_file)
    conn_model = 'cov'
    node_size = 2
    smooth = 2
    dens_thresh = False
    network = 'Default'
    ID = '997'
    mask = None
    min_span_tree = False
    disp_filt = False
    parc = None
    prune = 1
    atlas_select = 'whole_brain_cluster_labels_PCA200'
    uatlas_select = None
    labels_file_path = dir_path + '/whole_brain_cluster_labels_PCA200/Default_func_labelnames_wb.pkl'
    with open(labels_file_path, 'rb') as labels_file:
        label_names = pickle.load(labels_file)
    coord_file_path = dir_path + '/whole_brain_cluster_labels_PCA200/Default_func_coords_wb.pkl'
    with open(coord_file_path, 'rb') as coord_file:
        coords = pickle.load(coord_file)
    vox_array = None

    start_time = time.time()
    [conn_matrix, conn_model, dir_path, node_size, smooth, dens_thresh, network, ID, mask, min_span_tree,
     disp_filt, parc, prune, atlas_select, uatlas_select, label_names,
     coords] = graphestimation.get_conn_matrix(time_series, conn_model, dir_path, node_size, smooth,
                                               dens_thresh, network, ID, mask, min_span_tree, disp_filt,
                                               parc, prune, atlas_select, uatlas_select, label_names,
                                               coords, vox_array)
    print("%s%s%s" % ('get_conn_matrix --> finished: ', str(np.round(time.time() - start_time, 1)), 's'))

    assert conn_matrix is not None
    assert conn_model is not None
    assert dir_path is not None
    assert node_size is not None
    assert smooth is not None
    assert dens_thresh is not None
    assert network is not None
    assert ID is not None
    # assert mask is not None
    assert min_span_tree is not None
    assert disp_filt is not None
    # assert parc is not None
    assert prune is not None
    assert atlas_select is not None
    # assert uatlas_select is not None
    # assert label_names is not None
    assert coords is not None
def test_get_conn_matrix_cov():
    base_dir = str(Path(__file__).parent / "examples")
    dir_path = base_dir + '/997'
    time_series_file = dir_path + '/coords_power_2011/997_wb_net_ts.txt'
    time_series = np.genfromtxt(time_series_file)
    conn_model = 'cov'
    conn_matrix = graphestimation.get_conn_matrix(time_series, conn_model)
    assert conn_matrix is not None
def test_get_conn_matrix():
    base_dir = str(Path(__file__).parent / "examples")
    dir_path = base_dir + '/997'
    time_series_file = dir_path + '/coords_power_2011/997_wb_net_ts.txt'
    time_series = np.genfromtxt(time_series_file)
    conn_model_list = ['sps', 'cov', 'corr', 'partcorr', 'tangent']
    for conn_model in conn_model_list:
        conn_matrix = graphestimation.get_conn_matrix(time_series, conn_model)
        assert conn_matrix is not None
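# The tests above treat graphestimation.get_conn_matrix as a black box. As a point of reference
# only (its internals are not shown here), a correlation-type connectivity matrix can be
# estimated from a (timepoints x nodes) array with nilearn's ConnectivityMeasure; the
# 'corr'/'partcorr' entries in the model list above presumably map onto similar estimators.
# The helper name below is illustrative only.
import numpy as np
from nilearn.connectome import ConnectivityMeasure


def correlation_matrix_sketch(time_series):
    # time_series: 2-D array of shape (n_timepoints, n_nodes)
    conn_measure = ConnectivityMeasure(kind='correlation')
    # fit_transform expects a list of subject arrays and returns one matrix per subject
    return conn_measure.fit_transform([time_series])[0]


# Example with synthetic data:
# mat = correlation_matrix_sketch(np.random.default_rng(0).standard_normal((120, 50)))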
def thresh_and_fit(adapt_thresh, dens_thresh, thr, ts_within_nodes, conn_model, network, ID, dir_path, mask):
    import os
    import sys
    import numpy as np
    from pynets import utils, thresholding, graphestimation

    ##Adaptive thresholding scenario
    if adapt_thresh is not False:
        try:
            est_path2 = dir_path + '/' + ID + '_structural_est.txt'
            if os.path.isfile(est_path2) == True:
                #[conn_matrix_thr, est_path, edge_threshold, thr] = adaptive_thresholding(ts_within_nodes, conn_model, network, ID, est_path2, dir_path)
                ##Save unthresholded
                unthr_path = utils.create_unthr_path(ID, network, conn_model, mask, dir_path)
                #np.savetxt(unthr_path, conn_matrix_thr, delimiter='\t')
                edge_threshold = str(float(thr) * 100) + '%'
            else:
                print('No structural mx found! Exiting...')
                sys.exit()
        except:
            print('No structural mx assigned! Exiting...')
            sys.exit()
    else:
        if not dens_thresh:
            print('\nRunning graph estimation and thresholding proportionally at: ' + str(thr) + '% ...\n')
        else:
            print('\nRunning graph estimation and thresholding to achieve density of: ' +
                  str(100 * dens_thresh) + '% ...\n')

        ##Fit mat
        conn_matrix = graphestimation.get_conn_matrix(ts_within_nodes, conn_model)

        ##Save unthresholded
        unthr_path = utils.create_unthr_path(ID, network, conn_model, mask, dir_path)
        np.savetxt(unthr_path, conn_matrix, delimiter='\t')

        if not dens_thresh:
            ##Save thresholded
            conn_matrix_thr = thresholding.threshold_proportional(conn_matrix, float(thr))
            edge_threshold = str(float(thr) * 100) + '%'
            est_path = utils.create_est_path(ID, network, conn_model, thr, mask, dir_path)
        else:
            conn_matrix_thr = thresholding.density_thresholding(conn_matrix, dens_thresh)
            edge_threshold = str((1 - float(dens_thresh)) * 100) + '%'
            est_path = utils.create_est_path(ID, network, conn_model, dens_thresh, mask, dir_path)
        np.savetxt(est_path, conn_matrix_thr, delimiter='\t')
    return conn_matrix_thr, edge_threshold, est_path, thr
def test_get_conn_matrix_cov():
    base_dir = str(Path(__file__).parent / "examples")
    dir_path = base_dir + '/997'
    time_series_file = dir_path + '/coords_power_2011/997_wb_net_ts.txt'
    conn_model = 'cov'
    time_series = np.genfromtxt(time_series_file)
    start_time = time.time()
    conn_matrix = graphestimation.get_conn_matrix(time_series, conn_model)
    print("%s%s%s" % ('get_conn_matrix --> finished: ', str(np.round(time.time() - start_time, 1)), 's'))
    assert conn_matrix is not None
def wb_connectome_with_us_atlas_coords(input_file, ID, atlas_select, NETWORK, node_size, mask, thr, parlistfile, all_nets, conn_model, dens_thresh, conf, adapt_thresh, plot_switch, bedpostx_dir):
    nilearn_atlases = ['atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009']

    ##Input is nifti file
    func_file = input_file

    ##Test if atlas_select is a nilearn atlas
    if atlas_select in nilearn_atlases:
        try:
            parlistfile = getattr(datasets, 'fetch_%s' % atlas_select)().maps
            try:
                label_names = getattr(datasets, 'fetch_%s' % atlas_select)().labels
            except:
                label_names = None
            try:
                networks_list = getattr(datasets, 'fetch_%s' % atlas_select)().networks
            except:
                networks_list = None
        except:
            print('PyNets is not ready for multi-scale atlases like BASC just yet!')
            sys.exit()

    ##Fetch user-specified atlas coords
    [coords, atlas_name, par_max] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
    atlas_select = atlas_name
    try:
        label_names
    except:
        label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()

    ##Get subject directory path
    dir_path = os.path.dirname(os.path.realpath(func_file)) + '/' + atlas_select
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    ##Get coord membership dictionary if all_nets option triggered
    if all_nets != None:
        try:
            networks_list
        except:
            networks_list = None
        [membership, membership_plotting] = nodemaker.get_mem_dict(func_file, coords, networks_list)

    ##Describe user atlas coords
    print('\n' + atlas_name + ' comes with {0} '.format(par_max) + 'parcels' + '\n')
    print('\n' + 'Stacked atlas coordinates in array of shape {0}.'.format(coords.shape) + '\n')

    ##Mask coordinates
    if mask is not None:
        [coords, label_names] = nodemaker.coord_masker(mask, coords, label_names)

    ##Save coords and label_names to pickles
    coord_path = dir_path + '/coords_wb_' + str(thr) + '.pkl'
    with open(coord_path, 'wb') as f:
        pickle.dump(coords, f)
    labels_path = dir_path + '/labelnames_wb_' + str(thr) + '.pkl'
    with open(labels_path, 'wb') as f:
        pickle.dump(label_names, f)

    if bedpostx_dir is not None:
        from pynets.diffconnectometry import run_struct_mapping
        FSLDIR = os.environ['FSLDIR']
        try:
            FSLDIR
        except NameError:
            print('FSLDIR environment variable not set!')
        est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path, NETWORK, coords, node_size)

    ##Extract time series from whole-brain parcellations
    parcellation = nib.load(parlistfile)
    parcel_masker = input_data.NiftiLabelsMasker(labels_img=parcellation, background_label=0,
                                                 memory='nilearn_cache', memory_level=5, standardize=True)
    ts_within_parcels = parcel_masker.fit_transform(func_file, confounds=conf)
    print('\n' + 'Time series has {0} samples'.format(ts_within_parcels.shape[0]) + '\n')

    ##Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_whole_brain_ts_within_parcels.txt'
    np.savetxt(out_path_ts, ts_within_parcels)

    ##Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2) == True:
            [conn_matrix, est_path, edge_threshold, thr] = thresholding.adaptive_thresholding(
                ts_within_parcels, conn_model, NETWORK, ID, est_path2, dir_path)
        else:
            print('No structural mx found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
        [conn_matrix, est_path] = graphestimation.get_conn_matrix(ts_within_parcels, conn_model, NETWORK,
                                                                  ID, dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold, thr] = thresholding.density_thresholding(
            ts_within_parcels, conn_model, NETWORK, ID, dens_thresh, dir_path)

    if plot_switch == True:
        ##Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name, dir_path, ID, NETWORK, label_names)

        ##Plot adj. matrix based on determined inputs
        atlast_graph_title = plotting.plot_conn_mat(conn_matrix, conn_model, atlas_name, dir_path, ID,
                                                    NETWORK, label_names, mask)

        ##Plot connectome viz for all Yeo networks
        if all_nets != False:
            plotting.plot_membership(membership_plotting, conn_matrix, conn_model, coords, edge_threshold,
                                     atlas_name, dir_path)
        else:
            out_path_fig = dir_path + '/' + ID + '_connectome_viz.png'
            niplot.plot_connectome(conn_matrix, coords, title=atlast_graph_title,
                                   edge_threshold=edge_threshold, node_size=20, colorbar=True,
                                   output_file=out_path_fig)
    return est_path, thr
def network_connectome(input_file, ID, atlas_select, NETWORK, node_size, mask, thr, parlistfile, all_nets, conn_model, dens_thresh, conf, adapt_thresh, plot_switch, bedpostx_dir):
    nilearn_atlases = ['atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009']

    ##Input is nifti file
    func_file = input_file

    ##Test if atlas_select is a nilearn atlas
    if atlas_select in nilearn_atlases:
        atlas = getattr(datasets, 'fetch_%s' % atlas_select)()
        try:
            parlistfile = atlas.maps
            try:
                label_names = atlas.labels
            except:
                label_names = None
            try:
                networks_list = atlas.networks
            except:
                networks_list = None
        except RuntimeError:
            print('Error, atlas fetching failed.')
            sys.exit()

    if parlistfile == None and atlas_select not in nilearn_atlases:
        ##Fetch nilearn atlas coords
        [coords, atlas_name, networks_list, label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

        if atlas_name == 'Power 2011 atlas':
            ##Reference RSN list
            import pkgutil
            import io
            network_coords_ref = NETWORK + '_coords.csv'
            atlas_coords = pkgutil.get_data("pynets", "rsnrefs/" + network_coords_ref)
            df = pd.read_csv(io.BytesIO(atlas_coords)).ix[:, 0:4]
            i = 1
            net_coords = []
            ix_labels = []
            for i in range(len(df)):
                #print("ROI Reference #: " + str(i))
                x = int(df.ix[i, 1])
                y = int(df.ix[i, 2])
                z = int(df.ix[i, 3])
                #print("X:" + str(x) + " Y:" + str(y) + " Z:" + str(z))
                net_coords.append((x, y, z))
                ix_labels.append(i)
                i = i + 1
                #print(net_coords)
            label_names = ix_labels
        elif atlas_name == 'Dosenbach 2010 atlas':
            coords = list(tuple(x) for x in coords)

            ##Get coord membership dictionary
            [membership, membership_plotting] = nodemaker.get_mem_dict(func_file, coords, networks_list)

            ##Convert to membership dataframe
            mem_df = membership.to_frame().reset_index()
            nets_avail = list(set(list(mem_df['index'])))

            ##Get network name equivalents
            if NETWORK == 'DMN':
                NETWORK = 'default'
            elif NETWORK == 'FPTC':
                NETWORK = 'fronto-parietal'
            elif NETWORK == 'CON':
                NETWORK = 'cingulo-opercular'
            elif NETWORK not in nets_avail:
                print('Error: ' + NETWORK + ' not available with this atlas!')
                sys.exit()

            ##Get coords for network-of-interest
            mem_df.loc[mem_df['index'] == NETWORK]
            net_coords = mem_df.loc[mem_df['index'] == NETWORK][[0]].values[:, 0]
            net_coords = list(tuple(x) for x in net_coords)
            ix_labels = mem_df.loc[mem_df['index'] == NETWORK].index.values

        ####Add code for any special RSN reference lists for the nilearn atlases here#####

        ##If label_names are not indices and NETWORK is specified, sub-list the label names
        if label_names != ix_labels:
            try:
                label_names = label_names.tolist()
            except:
                pass
            label_names = [label_names[i] for i in ix_labels]

        ##Get subject directory path
        dir_path = os.path.dirname(os.path.realpath(func_file)) + '/' + atlas_select
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        ##If masking, remove those coords that fall outside of the mask
        if mask != None:
            [net_coords, label_names] = nodemaker.coord_masker(mask, net_coords, label_names)

        ##Save coords and label_names to pickles
        coord_path = dir_path + '/coords_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(coord_path, 'wb') as f:
            pickle.dump(net_coords, f)
        labels_path = dir_path + '/labelnames_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(labels_path, 'wb') as f:
            pickle.dump(label_names, f)

        if bedpostx_dir is not None:
            from pynets.diffconnectometry import run_struct_mapping
            FSLDIR = os.environ['FSLDIR']
            try:
                FSLDIR
            except NameError:
                print('FSLDIR environment variable not set!')
            est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path, NETWORK, net_coords, node_size)
    else:
        ##Fetch user-specified atlas coords
        [coords_all, atlas_name, par_max] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
        coords = list(tuple(x) for x in coords_all)

        ##Get subject directory path
        dir_path = os.path.dirname(os.path.realpath(func_file)) + '/' + atlas_name
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        ##Get coord membership dictionary
        try:
            networks_list
        except:
            networks_list = None
        [membership, membership_plotting] = nodemaker.get_mem_dict(func_file, coords, networks_list)

        ##Convert to membership dataframe
        mem_df = membership.to_frame().reset_index()

        ##Get coords for network-of-interest
        mem_df.loc[mem_df['index'] == NETWORK]
        net_coords = mem_df.loc[mem_df['index'] == NETWORK][[0]].values[:, 0]
        net_coords = list(tuple(x) for x in net_coords)
        ix_labels = mem_df.loc[mem_df['index'] == NETWORK].index.values
        try:
            label_names = [label_names[i] for i in ix_labels]
        except:
            label_names = ix_labels

        if mask != None:
            [net_coords, label_names] = nodemaker.coord_masker(mask, net_coords, label_names)

        ##Save coords and label_names to pickles
        coord_path = dir_path + '/coords_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(coord_path, 'wb') as f:
            pickle.dump(net_coords, f)
        labels_path = dir_path + '/labelnames_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(labels_path, 'wb') as f:
            pickle.dump(label_names, f)

        if bedpostx_dir is not None:
            from pynets.diffconnectometry import run_struct_mapping
            est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path, NETWORK, net_coords, node_size)

    ##Generate network parcels image (through refinement, this could be used
    ##in place of the 3 lines above)
    #net_parcels_img_path = gen_network_parcels(parlistfile, NETWORK, labels)
    #parcellation = nib.load(net_parcels_img_path)
    #parcel_masker = input_data.NiftiLabelsMasker(labels_img=parcellation, background_label=0, memory='nilearn_cache', memory_level=5, standardize=True)
    #ts_within_parcels = parcel_masker.fit_transform(func_file)
    #net_ts = ts_within_parcels

    ##Grow ROIs
    masker = input_data.NiftiSpheresMasker(seeds=net_coords, radius=float(node_size), allow_overlap=True,
                                           memory_level=5, memory='nilearn_cache', verbose=2,
                                           standardize=True)
    ts_within_spheres = masker.fit_transform(func_file, confounds=conf)
    net_ts = ts_within_spheres

    ##Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_' + NETWORK + '_net_ts.txt'
    np.savetxt(out_path_ts, net_ts)

    ##Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2) == True:
            [conn_matrix, est_path, edge_threshold, thr] = thresholding.adaptive_thresholding(
                ts_within_spheres, conn_model, NETWORK, ID, est_path2, dir_path)
        else:
            print('No structural mx found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
        [conn_matrix, est_path] = graphestimation.get_conn_matrix(ts_within_spheres, conn_model, NETWORK,
                                                                  ID, dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold, thr] = thresholding.density_thresholding(
            ts_within_spheres, conn_model, NETWORK, ID, dens_thresh, dir_path)

    if plot_switch == True:
        ##Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name, dir_path, ID, NETWORK, label_names)

        ##Plot adj. matrix based on determined inputs
        plotting.plot_conn_mat(conn_matrix, conn_model, atlas_name, dir_path, ID, NETWORK, label_names, mask)

        ##Plot network time-series
        plotting.plot_timeseries(net_ts, NETWORK, ID, dir_path, atlas_name, label_names)

        ##Plot connectome viz for specific Yeo networks
        title = "Connectivity Projected on the " + NETWORK
        out_path_fig = dir_path + '/' + ID + '_' + NETWORK + '_connectome_plot.png'
        niplot.plot_connectome(conn_matrix, net_coords, edge_threshold=edge_threshold, title=title,
                               display_mode='lyrz', output_file=out_path_fig)
    return est_path, thr
def wb_connectome_with_nl_atlas_coords(input_file, ID, atlas_select, NETWORK, node_size, mask, thr, all_nets, conn_model, dens_thresh, conf, adapt_thresh, plot_switch, bedpostx_dir):
    nilearn_atlases = ['atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009']

    ##Input is nifti file
    func_file = input_file

    ##Fetch nilearn atlas coords
    [coords, atlas_name, networks_list, label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

    ##Get subject directory path
    dir_path = os.path.dirname(os.path.realpath(func_file)) + '/' + atlas_select
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    ##Get coord membership dictionary if all_nets option triggered
    if all_nets != False:
        try:
            networks_list
        except:
            networks_list = None
        [membership, membership_plotting] = nodemaker.get_mem_dict(func_file, coords, networks_list)

    ##Mask coordinates
    if mask is not None:
        [coords, label_names] = nodemaker.coord_masker(mask, coords, label_names)

    ##Save coords and label_names to pickles
    coord_path = dir_path + '/coords_wb_' + str(thr) + '.pkl'
    with open(coord_path, 'wb') as f:
        pickle.dump(coords, f)
    labels_path = dir_path + '/labelnames_wb_' + str(thr) + '.pkl'
    with open(labels_path, 'wb') as f:
        pickle.dump(label_names, f)

    if bedpostx_dir is not None:
        from pynets.diffconnectometry import run_struct_mapping
        FSLDIR = os.environ['FSLDIR']
        try:
            FSLDIR
        except NameError:
            print('FSLDIR environment variable not set!')
        est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path, NETWORK, coords, node_size)

    ##Extract within-spheres time-series from func file
    spheres_masker = input_data.NiftiSpheresMasker(seeds=coords, radius=float(node_size),
                                                   memory='nilearn_cache', memory_level=5, verbose=2,
                                                   standardize=True)
    ts_within_spheres = spheres_masker.fit_transform(func_file, confounds=conf)
    print('\n' + 'Time series has {0} samples'.format(ts_within_spheres.shape[0]) + '\n')

    ##Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_whole_brain_ts_within_spheres.txt'
    np.savetxt(out_path_ts, ts_within_spheres)

    ##Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2) == True:
            [conn_matrix, est_path, edge_threshold, thr] = thresholding.adaptive_thresholding(
                ts_within_spheres, conn_model, NETWORK, ID, est_path2, dir_path)
        else:
            print('No structural mx found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
        [conn_matrix, est_path] = graphestimation.get_conn_matrix(ts_within_spheres, conn_model, NETWORK,
                                                                  ID, dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold, thr] = thresholding.density_thresholding(
            ts_within_spheres, conn_model, NETWORK, ID, dens_thresh, dir_path)

    if plot_switch == True:
        ##Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name, dir_path, ID, NETWORK, label_names)

        ##Plot adj. matrix based on determined inputs
        plotting.plot_conn_mat(conn_matrix, conn_model, atlas_name, dir_path, ID, NETWORK, label_names, mask)

        ##Plot connectome viz for all Yeo networks
        if all_nets != False:
            plotting.plot_membership(membership_plotting, conn_matrix, conn_model, coords, edge_threshold,
                                     atlas_name, dir_path)
        else:
            out_path_fig = dir_path + '/' + ID + '_' + atlas_name + '_connectome_viz.png'
            niplot.plot_connectome(conn_matrix, coords, title=atlas_name, edge_threshold=edge_threshold,
                                   node_size=20, colorbar=True, output_file=out_path_fig)
    return est_path, thr
def adaptive_thresholding(ts_within_spheres, conn_model, NETWORK, ID, struct_mat_path, dir_path):
    import collections
    from pynets import binarize, thr2prob, est_density

    def thr_step(func_mat, thr):
        thr = float(thr) + float(0.01)
        func_mat = threshold_absolute(func_mat, thr)
        return func_mat

    ##Calculate # False Connections
    def est_error_rates(func_mat, struct_mat_bin, thr):
        func_mat = thr_step(func_mat, thr)
        func_mat_bin = binarize(func_mat)
        diffs = func_mat_bin - struct_mat_bin
        density = est_density(func_mat)
        unique, counts = np.unique(diffs, return_counts=True)
        accuracy_dict = dict(zip(unique, counts))
        FN = accuracy_dict.get(-1.0)
        FP = accuracy_dict.get(1.0)
        FN_error = float(float(FN)/diffs.size)
        FP_error = float(float(FP)/diffs.size)
        total_err = float(float(FP + FN)/diffs.size)
        return FP_error, FN_error, total_err, density

    ##Initial absolute threshold
    thr = 0.0
    [conn_matrix, est_path] = graphestimation.get_conn_matrix(ts_within_spheres, conn_model, NETWORK, ID,
                                                              dir_path, thr)
    struct_mat = np.genfromtxt(struct_mat_path)
    print('Using reference structural matrix from: ' + struct_mat_path)

    ##Prep functional mx
    conn_matrix = normalize(conn_matrix)
    np.fill_diagonal(conn_matrix, 0)
    func_mat = conn_matrix
    func_mat_bin = binarize(func_mat)
    fG = nx.from_numpy_matrix(func_mat)
    density = est_density(func_mat)

    ##Prep structural mx
    np.fill_diagonal(struct_mat, 0)
    struct_mat_thr2bin = thr2prob(struct_mat)
    struct_mat_bin = binarize(struct_mat_thr2bin)

    ##Unthresholded error rates relative to the structural "ground truth"
    diffs = func_mat_bin - struct_mat_bin
    unique, counts = np.unique(diffs, return_counts=True)
    accuracy_dict = dict(zip(unique, counts))
    FN = accuracy_dict.get(-1.0)
    ACC = accuracy_dict.get(0.0)
    FP = accuracy_dict.get(1.0)
    FN_error = float(float(FN)/float(diffs.size))
    FP_error = float(float(FP)/float(diffs.size))
    print('FN Error: ' + str(FN_error))
    print('FP Error: ' + str(FP_error))
    ACCUR = float(float(ACC)/float(diffs.size))
    total_err = float(float(FP + FN)/diffs.size)
    print('Using Structural Correspondence as Ground Truth. Unthresholded FP Error: ' + str(FP_error*100) +
          '%' + '; Unthresholded FN Error: ' + str(FN_error*100) + '%' + '; Unthresholded Accuracy: ' +
          str(ACCUR*100) + '%')
    print('Adaptively thresholding...')

    ##Create dictionary of error rates across candidate thresholds
    d = {}
    d[str(thr)] = [FP_error, FN_error, total_err, density]
    print('Creating dictionary of thresholds...')
    while thr < 0.2:
        [FP_error, FN_error, total_err, density] = est_error_rates(func_mat, struct_mat_bin, thr)
        d[str(thr)] = [round(FP_error, 2), round(FN_error, 2), round(total_err, 2), round(density, 2)]
        thr = thr + 0.0001
    d = collections.OrderedDict(sorted(d.items()))

    ##Candidate thresholds where FP and FN error rates balance
    good_threshes = []
    for key, value in d.items():
        if value[0] == value[1]:
            good_threshes.append(float(key))

    [conn_matrix, est_path] = graphestimation.get_conn_matrix(ts_within_spheres, conn_model, NETWORK, ID,
                                                              dir_path, thr)
    conn_matrix = normalize(conn_matrix)
    np.fill_diagonal(conn_matrix, 0)
    min_thresh = min(good_threshes)
    FP = d[str(min_thresh)][0]
    FN = d[str(min_thresh)][1]
    FN_error = float(float(FN)/float(diffs.size))
    FP_error = float(float(FP)/float(diffs.size))
    density = est_density(conn_matrix)
    print('\n\n\nBest Threshold: ' + str(min_thresh))
    print('Graph Density: ' + str(density))
    print('Final Thresholded FN Error: ' + str(FN_error))
    print('Final Thresholded FP Error: ' + str(FP_error) + '\n\n\n')
    conn_matrix = threshold_absolute(conn_matrix, min_thresh)
    edge_threshold = str(float(min_thresh)*100) + '%'
    return conn_matrix, est_path, edge_threshold, min_thresh
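# A minimal, standalone sketch (assumed helpers inlined, not PyNets code) of the FP/FN
# bookkeeping used by adaptive_thresholding above: binarize both matrices, subtract, and count
# the +1/-1 disagreements against the structural "ground truth". The function name is
# illustrative only.
import numpy as np


def error_rates_sketch(func_mat_bin, struct_mat_bin):
    # Entries of diffs: +1 = edge only in the functional graph (false positive),
    # -1 = edge only in the structural graph (false negative), 0 = agreement.
    diffs = func_mat_bin - struct_mat_bin
    unique, counts = np.unique(diffs, return_counts=True)
    accuracy_dict = dict(zip(unique, counts))
    fp = accuracy_dict.get(1, 0)
    fn = accuracy_dict.get(-1, 0)
    fp_error = fp / diffs.size
    fn_error = fn / diffs.size
    total_err = (fp + fn) / diffs.size
    return fp_error, fn_error, total_err


# Example with synthetic binary matrices:
# rng = np.random.default_rng(0)
# a = (rng.random((90, 90)) > 0.5).astype(int)
# b = (rng.random((90, 90)) > 0.5).astype(int)
# print(error_rates_sketch(a, b))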