def remove_cci_outliers(streamlines):
    """Filter out streamlines with a low cluster confidence index (CCI).

    Parameters
    ----------
    streamlines : iterable of array-like
        Input streamlines; wrapped in a ``Streamlines`` container before
        the CCI is computed (``subsample=7`` points per streamline).

    Returns
    -------
    kept : Streamlines
        Streamlines whose CCI is at least 1.
    kept_idx : list of int
        Indices into the input of the streamlines that were kept.
    """
    bundle = Streamlines(streamlines)
    cci = cluster_confidence(bundle, subsample=7)
    kept = Streamlines()
    kept_idx = []
    for idx, line in enumerate(bundle):
        # CCI below 1 marks the streamline as a spurious outlier.
        if cci[idx] < 1:
            continue
        kept.append(line)
        kept_idx.append(idx)
    return kept, kept_idx
def clean_subject(ndata):
    """Build a cleaned streamline bundle for one subject and score it.

    Pipeline: strip zero padding from every raw streamline, discard
    streamlines of 40 or less (presumably mm — TODO confirm units),
    drop near-identical duplicates, then compute the cluster confidence
    index (CCI) on what remains.

    Parameters
    ----------
    ndata : array-like
        Per-subject array whose first axis indexes raw (zero-padded)
        streamlines.

    Returns
    -------
    tuple
        ``(gen_array(cleaned_streamlines), cci)`` where ``cci`` is a
        ``numpy`` array with one confidence value per kept streamline.
    """
    # Strip the zero padding and collect into a Streamlines container.
    # Iterate the array directly instead of indexing by position.
    streamlines_evl = Streamlines()
    for raw in ndata:
        streamlines_evl.append(zero_remove(raw))

    # Keep only streamlines longer than 40 so short fragments do not
    # inflate the CCI.
    lengths = list(utils.length(streamlines_evl))
    long_streamlines_evl = Streamlines()
    for i, sl in enumerate(streamlines_evl):
        if lengths[i] > 40:
            long_streamlines_evl.append(sl)

    # Duplicate streamlines make cluster_confidence raise, so remove them
    # first (threshold 10 — see remove_indentical).
    streamlines_test = remove_indentical(10, long_streamlines_evl)
    cci_idv = cluster_confidence(streamlines_test, subsample=64)

    # BUGFIX(cleanup): the original appended cci_idv to a throwaway list
    # and returned np.array(cci[0]); return the value directly instead.
    return gen_array(streamlines_test), np.array(cci_idv)
def cal_cci(name_list, text):
    """Compute the cluster confidence index (CCI) for every subject file.

    Each entry of ``name_list`` is loaded from the ``np_data`` directory
    (the streamline array is stored under key ``'arr_0'``), zero padding
    is stripped from each streamline, near-duplicates are removed, and
    the CCI is computed with 64 subsampled points per streamline.

    Parameters
    ----------
    name_list : list of str
        Subject file names located under ``np_data``.
    text : str
        Output file name; results are saved under ``data_4_model``.

    Returns
    -------
    list
        One CCI array per subject, in ``name_list`` order (also written
        to disk via ``np.save``).
    """
    cci = []
    for count, fname in enumerate(name_list, start=1):
        ndata = np.load(os.path.join('np_data', fname))['arr_0']
        # Rebuild the bundle, stripping zero-padded points from each line.
        bundle = Streamlines()
        for line in ndata:
            bundle.append(zero_remove(line))
        # Identical streamlines would make cluster_confidence raise.
        deduped = remove_indentical(10, bundle)
        cci.append(cluster_confidence(deduped, subsample=64))
        print('the ' + str(count) + ' finished')
    np.save(os.path.join('data_4_model', text), cci)
    return cci
""" lengths = list(length(streamlines)) long_streamlines = Streamlines() for i, sl in enumerate(streamlines): if lengths[i] > 40: long_streamlines.append(sl) """ Now we calculate the Cluster Confidence Index using the corpus callosum streamline bundle and visualize them. """ cci = cluster_confidence(long_streamlines) # Visualize the streamlines, colored by cci ren = window.Renderer() hue = [0.5, 1] saturation = [0.0, 1.0] lut_cmap = actor.colormap_lookup_table(scale_range=(cci.min(), cci.max()/4), hue_range=hue, saturation_range=saturation) bar3 = actor.scalar_bar(lut_cmap) ren.add(bar3) stream_actor = actor.line(long_streamlines, cci, linewidth=0.1,
""" We do not want our results inflated by short streamlines, so we remove streamlines shorter than 40mm prior to calculating the CCI. """ lengths = list(length(streamlines)) long_streamlines = Streamlines() for i, sl in enumerate(streamlines): if lengths[i] > 40: long_streamlines.append(sl) """ Now we calculate the Cluster Confidence Index using the corpus callosum streamline bundle and visualize them. """ cci = cluster_confidence(long_streamlines) # Visualize the streamlines, colored by cci ren = window.Renderer() hue = [0.5, 1] saturation = [0.0, 1.0] lut_cmap = actor.colormap_lookup_table(scale_range=(cci.min(), cci.max() / 4), hue_range=hue, saturation_range=saturation) bar3 = actor.scalar_bar(lut_cmap) ren.add(bar3) stream_actor = actor.line(long_streamlines,
def test_cluster_confidence():
    """Unit tests for ``cluster_confidence`` (CCI) on hand-built bundles."""
    # Diagonal streamline: 10 points, shape (10, 3) after the transpose.
    mysl = np.array([np.arange(10)] * 3, 'float').T

    # a short streamline (<20 mm) should raise an error unless override=True
    test_streamlines = Streamlines()
    test_streamlines.append(mysl)
    assert_raises(ValueError, cluster_confidence, test_streamlines)
    cci = cluster_confidence(test_streamlines, override=True)

    # two identical streamlines should raise an error
    test_streamlines = Streamlines()
    test_streamlines.append(mysl, cache_build=True)
    test_streamlines.append(mysl)
    test_streamlines.finalize_append()
    assert_raises(ValueError, cluster_confidence, test_streamlines)

    # 3 offset collinear streamlines
    test_streamlines = Streamlines()
    test_streamlines.append(mysl, cache_build=True)
    test_streamlines.append(mysl + 1)
    test_streamlines.append(mysl + 2)
    test_streamlines.finalize_append()
    cci = cluster_confidence(test_streamlines, override=True)
    # The outer two streamlines are symmetric about the middle one, so
    # their confidence values match and the middle one scores higher.
    assert_equal(cci[0], cci[2])
    assert_true(cci[1] > cci[0])

    # 3 parallel streamlines
    # Build axis-aligned streamlines separated only along y.
    mysl = np.zeros([10, 3])
    mysl[:, 0] = np.arange(10)
    mysl2 = mysl.copy()
    mysl2[:, 1] = 1
    mysl3 = mysl.copy()
    mysl3[:, 1] = 2
    mysl4 = mysl.copy()
    mysl4[:, 1] = 4
    mysl5 = mysl.copy()
    mysl5[:, 1] = 5000  # far outlier, used for the max_mdf test below

    test_streamlines_p1 = Streamlines()
    test_streamlines_p1.append(mysl, cache_build=True)
    test_streamlines_p1.append(mysl2)
    test_streamlines_p1.append(mysl3)
    test_streamlines_p1.finalize_append()
    test_streamlines_p2 = Streamlines()
    test_streamlines_p2.append(mysl, cache_build=True)
    test_streamlines_p2.append(mysl3)
    test_streamlines_p2.append(mysl4)
    test_streamlines_p2.finalize_append()
    test_streamlines_p3 = Streamlines()
    test_streamlines_p3.append(mysl, cache_build=True)
    test_streamlines_p3.append(mysl2)
    test_streamlines_p3.append(mysl3)
    test_streamlines_p3.append(mysl5)
    test_streamlines_p3.finalize_append()

    cci_p1 = cluster_confidence(test_streamlines_p1, override=True)
    cci_p2 = cluster_confidence(test_streamlines_p2, override=True)

    # test relative distance
    # p2's spacings are exactly double p1's, so each CCI halves.
    assert_array_equal(cci_p1, cci_p2 * 2)

    # test simple cci calculation
    # Expected values are sums of reciprocal pairwise distances
    # (power=1 default): e.g. the middle p1 streamline sits 1 away from
    # each neighbour, giving 1/1 + 1/1.
    expected_p1 = np.array([1. / 1 + 1. / 2, 1. / 1 + 1. / 1,
                            1. / 1 + 1. / 2])
    expected_p2 = np.array([1. / 2 + 1. / 4, 1. / 2 + 1. / 2,
                            1. / 2 + 1. / 4])
    assert_array_equal(expected_p1, cci_p1)
    assert_array_equal(expected_p2, cci_p2)

    # test power variable calculation (dropoff with distance)
    cci_p1_pow2 = cluster_confidence(test_streamlines_p1, power=2,
                                     override=True)
    expected_p1_pow2 = np.array([np.power(1. / 1, 2) + np.power(1. / 2, 2),
                                 np.power(1. / 1, 2) + np.power(1. / 1, 2),
                                 np.power(1. / 1, 2) + np.power(1. / 2, 2)])
    assert_array_equal(cci_p1_pow2, expected_p1_pow2)

    # test max distance (ignore distant sls)
    # With max_mdf=5 the streamline at y=5000 contributes nothing and
    # receives a CCI of zero; the rest match the p1 result.
    cci_dist = cluster_confidence(test_streamlines_p3, max_mdf=5,
                                  override=True)
    expected_cci_dist = np.concatenate([cci_p1, np.zeros(1)])
    assert_array_equal(cci_dist, expected_cci_dist)
score_tmp = cal_dist_score(avg_dist, cc_uniques) scores.append(score_tmp) return np.array(scores) result_dis = cal_score_subject(bundle) #%% streamlines_evl = Streamlines() for j in range(np.shape(bundle)[0]): tmp = bundle[j] tmp = zero_remove(tmp) streamlines_evl.append(tmp) result_cci = cluster_confidence(streamlines_evl, subsample=64) #%% preds_cci = 1 * (result_cci > results.cci_thre) preds_dis = 1 * (result_dis < results.dis_thre) pred_if = 1 * (pred_if > 0) pred_1 = 1 * ((preds_cci + preds_dis + pred_if) > 2) bundle_str = Streamlines() for i in range(np.shape(bundle)[0]): tmp = bundle[i] tmp = zero_remove(tmp) #tmp = tmp[~np.all(tmp == 0, axis=-1)]
for i in range(id.__len__()): # for j in range(i + 1): # edge_s_list = [] # print(i,j) if (i + 1, j + 1) in streamline_dict and mat_medians[i, j] > 0: edge_s_list += streamline_dict[(i + 1, j + 1)] if (j + 1, i + 1) in streamline_dict and mat_medians[i, j] > 0: edge_s_list += streamline_dict[(j + 1, i + 1)] edge_vec_vols = [mat_medians[i, j]] * edge_s_list.__len__() s_list = s_list + edge_s_list vec_vols = vec_vols + edge_vec_vols s = Streamlines(s_list) cci = cluster_confidence(s) keep_streamlines = Streamlines() for i, sl in enumerate(s): if cci[i] >= 1: keep_streamlines.append(sl) # Visualize the streamlines we kept ren = window.Renderer() keep_streamlines_actor = actor.line(keep_streamlines, linewidth=0.1) ren.add(keep_streamlines_actor) interactive = True if interactive: