def test_compose_decompose_matrix44():
    for i in range(20):
        x0 = np.random.rand(12)
        mat = compose_matrix44(x0[:6])
        assert_array_almost_equal(x0[:6], decompose_matrix44(mat, size=6))
        mat = compose_matrix44(x0[:7])
        assert_array_almost_equal(x0[:7], decompose_matrix44(mat, size=7))
        mat = compose_matrix44(x0[:12])
        assert_array_almost_equal(x0[:12], decompose_matrix44(mat, size=12))
        assert_raises(ValueError, decompose_matrix44, mat, 20)
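# Parameter layout assumed by the tests in this file (translations, rotation
# angles in degrees, then scaling and shearing for the 12-parameter case);
# this is a summary of how compose_matrix44 is called below, not new API:
# compose_matrix44([tx, ty, tz, rx, ry, rz])                          -> rigid (6)
# compose_matrix44([tx, ty, tz, rx, ry, rz, s])                       -> similarity (7)
# compose_matrix44([tx, ty, tz, rx, ry, rz, sx, sy, sz, k1, k2, k3])  -> affine (12)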
def test_cascade_of_optimizations():
    cingulum_bundles = two_cingulum_bundles()

    cb1 = cingulum_bundles[0]
    cb1 = set_number_of_points(cb1, 20)

    test_x0 = np.array([10, 4, 3, 0, 20, 10, 1.5, 1.5, 1.5, 0., 0.2, 0])

    cb2 = transform_streamlines(cingulum_bundles[0],
                                compose_matrix44(test_x0))
    cb2 = set_number_of_points(cb2, 20)

    print('first rigid')
    slr = StreamlineLinearRegistration(x0=6)
    slm = slr.optimize(cb1, cb2)

    print('then similarity')
    slr2 = StreamlineLinearRegistration(x0=7)
    slm2 = slr2.optimize(cb1, cb2, slm.matrix)

    print('then affine')
    slr3 = StreamlineLinearRegistration(x0=12, options={'maxiter': 50})
    slm3 = slr3.optimize(cb1, cb2, slm2.matrix)

    assert_(slm2.fopt < slm.fopt)
    assert_(slm3.fopt < slm2.fopt)
def distance_mdf(x0, static, moving):
    # Minimum Direct Flip (MDF) distance [tract]
    aff = compose_matrix44(x0)
    moving = transform_streamlines(moving, aff)
    cost = mdf_cost(static, moving)
    costs.append(cost)
    return cost
def distance_tract_clustering_medoids(x0, static, moving, k_medoids, beta,
                                      max_dist):
    affine = compose_matrix44(x0)
    moving = transform_streamlines(moving, affine)
    # con_static = np.concatenate(static)
    con_moving = np.concatenate(moving)
    # con_static = static
    # con_moving = moving
    tree = KDTree(con_moving)
    cost = mdf_cost(static, moving)
    k = len(k_medoids.get_medoids())
    clustering_cost = 0
    for i in range(k):
        mean = np.mean(con_moving[k_medoids.get_clusters()[i]], axis=0)
        clustering_cost += np.linalg.norm(
            con_moving[k_medoids.get_medoids()[i]] -
            con_moving[tree.query([mean], k=1)[1][0]][0])
    # print(clustering_cost)
    cost += beta * clustering_cost
    costs.append(cost)
    return cost
def test_rigid_real_bundles():
    bundle_initial = fornix_streamlines()[:20]
    bundle, shift = center_streamlines(bundle_initial)

    mat = compose_matrix44([0, 0, 20, 45., 0, 0])
    bundle2 = transform_streamlines(bundle, mat)

    bundle_sum_distance = BundleSumDistanceMatrixMetric()
    srr = StreamlineLinearRegistration(bundle_sum_distance,
                                       x0=np.zeros(6),
                                       method='Powell')
    new_bundle2 = srr.optimize(bundle, bundle2).transform(bundle2)

    evaluate_convergence(bundle, new_bundle2)

    bundle_min_distance = BundleMinDistanceMatrixMetric()
    srr = StreamlineLinearRegistration(bundle_min_distance,
                                       x0=np.zeros(6),
                                       method='Powell')
    new_bundle2 = srr.optimize(bundle, bundle2).transform(bundle2)

    evaluate_convergence(bundle, new_bundle2)

    assert_raises(ValueError, StreamlineLinearRegistration, method='Whatever')
def transform(x0, moving):
    # Index of the nearest full-resolution vertex for each downsampled
    # control point, one array per streamline.
    idx = [
        np.hstack(KDTree(j).query(i, k=1)[1])
        for i, j in zip(set_number_of_points(moving, len(x0)), moving)
    ]
    new_moving = []
    for i in range(len(idx)):
        temp = []
        index = 0
        for k in range(len(x0) - 1):
            length2 = idx[i][k + 1] - idx[i][k]
            j = 0
            for index in range(idx[i][k], idx[i][k + 1]):
                # Blend the two neighbouring control-point transforms:
                # translations/rotations are linearly interpolated, scales
                # are multiplied.
                mat1 = np.copy(x0[k]).astype(float)
                mat1[:-1] = ((length2 - j) / length2) * mat1[:-1]
                mat2 = np.copy(x0[k + 1]).astype(float)
                mat2[:-1] = (j / length2) * mat2[:-1]
                mat3 = np.zeros((7, ))
                mat3[:-1] = mat2[:-1] + mat1[:-1]
                mat3[-1] = mat2[-1] * mat1[-1]
                temp.append(
                    apply_affine(compose_matrix44(mat3), moving[i][index]))
                j += 1
            index += 1
        new_moving.append(np.vstack(temp))
    return new_moving
def test_cascade_of_optimizations_and_threading():
    cingulum_bundles = two_cingulum_bundles()

    cb1 = cingulum_bundles[0]
    cb1 = set_number_of_points(cb1, 20)

    test_x0 = np.array([10, 4, 3, 0, 20, 10, 1.5, 1.5, 1.5, 0., 0.2, 0])

    cb2 = transform_streamlines(cingulum_bundles[0],
                                compose_matrix44(test_x0))
    cb2 = set_number_of_points(cb2, 20)

    print('first rigid')
    slr = StreamlineLinearRegistration(x0=6, num_threads=1)
    slm = slr.optimize(cb1, cb2)

    print('then similarity')
    slr2 = StreamlineLinearRegistration(x0=7, num_threads=2)
    slm2 = slr2.optimize(cb1, cb2, slm.matrix)

    print('then affine')
    slr3 = StreamlineLinearRegistration(x0=12, options={'maxiter': 50},
                                        num_threads=None)
    slm3 = slr3.optimize(cb1, cb2, slm2.matrix)

    assert_(slm2.fopt < slm.fopt)
    assert_(slm3.fopt < slm2.fopt)
def transform(affine, bundle, clusters_centers):
    num = len(clusters_centers)
    kdtree = KDTree(clusters_centers)
    distances, ids = kdtree.query(np.concatenate(bundle), k=num)
    distances[distances == 0] = 1  # a vertex sitting on a centroid gets distance 1
    distances = 1 / distances  # high value for close vertices
    # Normalise so that the weights of each vertex sum to 1.
    distances = np.divide(
        distances,
        distances.sum(axis=1).reshape((distances.shape[0], 1)))
    # weights = distances[:, 0] / distances.sum(axis=1)
    # Blend the local 7-parameter transforms with the distance weights and
    # compose one 4x4 affine per vertex.
    affine = [
        compose_matrix44(
            np.multiply(np.reshape(dis, (num, 1)), affine[id]).sum(axis=0))
        for dis, id in zip(distances, ids)
    ]
    # Z = [apply_affine(compose_matrix44(np.multiply(np.reshape(dis, (num, 1)),
    #                                                x0[id]).sum(axis=0)), vec)
    #      for dis, id, vec in zip(distances, ids, con_moving)]
    count = 0
    trans_bundle = []
    for tract in bundle:
        temp = []
        for vec in tract:
            temp.append(apply_affine(affine[count], vec))
            count += 1
        trans_bundle.append(np.array(temp))
    return trans_bundle
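# A minimal usage sketch (not part of the original module; KMeans centres and
# variable names are assumptions): with identity 7-parameter transforms the
# weighted blend is again [0, 0, 0, 0, 0, 0, 1], so the bundle is unchanged.
# centers = KMeans(3).fit(np.concatenate(bundle)).cluster_centers_
# local_params = np.zeros((3, 7))
# local_params[:, -1] = 1  # no translation/rotation, unit scale
# same_bundle = transform(local_params, bundle, centers)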
def distance_tract(x0, static, moving, points, max_dist):
    # Implementation of MDF using summation [tract]
    aff = compose_matrix44(x0)
    moving = transform_streamlines(moving, aff)
    cost = tract_cost(static, moving, points, max_dist)
    costs.append(cost)
    return cost
def distance_pc(x0, static, moving, beta, max_dist):
    # Uses a point cloud and a KD-tree.
    affine = compose_matrix44(x0)
    moving = transform_streamlines(moving, affine)
    cost = kd_tree_cost(np.concatenate(static), np.concatenate(moving),
                        max_dist) * beta
    # costs.append(cost)
    return cost
def fake_registration():
    mat = compose_matrix44([50, 20, 20, 180, 90, 90])
    target = read_ply('../data/132118/m_ex_atr-left_shore.ply')
    # Simulate a subject by transforming the target with a known matrix.
    subject = transform_streamlines(target, mat)
    subject_after_registration, _ = register(target, subject)
    draw_bundles([target, subject, subject_after_registration],
                 [[1, 0, 0], [0, 0, 1], [0, 0, .7]])
def icp_registration():
    mat = compose_matrix44([50, 20, 20, 180, 90, 90, 5])
    subject = read_ply('../data/164939/m_ex_atr-left_shore.ply')
    # subject = read_ply('../data/150019/m_ex_atr-left_shore.ply')
    target = transform_streamlines(subject, mat)
    subject_T = pca_transform(target, subject)
    # subject_T = registration_icp(static=target, moving=subject, pca=True)
    draw_bundles([target, subject_T, subject],
                 [[1, 0, 0], [0, 0, 1], [0, 0, .7]])
def distance_9D(x0, static, moving, beta, max_dist):
    # Uses a 9D tract representation for the distance.
    affine = compose_matrix44(x0)
    moving = transform_streamlines(moving, affine)
    new_static = make9D(static)
    new_moving = make9D(moving)
    cost = kd_tree_cost(new_static, new_moving, max_dist) * beta
    costs.append(cost)
    return cost
def test_whole_brain_slr():
    streams, hdr = nib.trackvis.read(get_fnames('fornix'))
    fornix = [s[0] for s in streams]

    f = Streamlines(fornix)
    f1 = f.copy()
    f2 = f.copy()

    # check translation
    f2._data += np.array([50, 0, 0])

    moved, transform, qb_centroids1, qb_centroids2 = whole_brain_slr(
        f1, f2, x0='affine', verbose=True, rm_small_clusters=2,
        greater_than=0, less_than=np.inf,
        qbx_thr=[5, 2, 1], progressive=False)

    # we can check the quality of registration by comparing the
    # MAM streamline distances before and after SLR
    D12 = bundles_distances_mam(f1, f2)
    D1M = bundles_distances_mam(f1, moved)

    d12_minsum = np.sum(np.min(D12, axis=0))
    d1m_minsum = np.sum(np.min(D1M, axis=0))

    print("distances= ", d12_minsum, " ", d1m_minsum)

    assert_equal(d1m_minsum < d12_minsum, True)

    assert_array_almost_equal(transform[:3, 3], [-50, -0, -0], 2)

    # check rotation
    mat = compose_matrix44([0, 0, 0, 15, 0, 0])

    f3 = f.copy()
    f3 = transform_streamlines(f3, mat)

    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qbx(
        f1, f3, verbose=False, rm_small_clusters=1, greater_than=20,
        less_than=np.inf, qbx_thr=[2], progressive=True)

    # we can also check the quality by looking at the decomposed transform
    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)

    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qbx(
        f1, f3, verbose=False, rm_small_clusters=1, select_random=400,
        greater_than=20, less_than=np.inf, qbx_thr=[2], progressive=True)

    # we can also check the quality by looking at the decomposed transform
    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)
def cost_fun(x0, static_centers, moving_centers, max_dist):
    moving_centers = np.array([
        apply_affine(compose_matrix44(x), vec)
        for x, vec in zip(np.reshape(x0, (len(moving_centers), 7)),
                          moving_centers)
    ])
    kdtree = KDTree(static_centers)
    # cost = kdtree.query(moving_centers, k=1)[0].sum()
    cost = kdtree.query(moving_centers, k=1)[0]
    cost = cost[np.where(cost < max_dist)].sum()
    costs.append(cost)
    return cost
def test_from_to_rigid():
    t = np.array([10, 2, 3, 0.1, 20., 30.])
    mat = compose_matrix44(t)
    vec = decompose_matrix44(mat, 6)

    assert_array_almost_equal(t, vec)

    t = np.array([0, 0, 0, 180, 0., 0.])
    mat = np.eye(4)
    mat[0, 0] = -1

    vec = decompose_matrix44(mat, 6)

    assert_array_almost_equal(-t, vec)
def test_whole_brain_slr():
    streams, hdr = nib.trackvis.read(get_data('fornix'))
    fornix = [s[0] for s in streams]

    f = Streamlines(fornix)
    f1 = f.copy()
    f2 = f.copy()

    # check translation
    f2._data += np.array([50, 0, 0])

    moved, transform, qb_centroids1, qb_centroids2 = whole_brain_slr(
        f1, f2, verbose=True, rm_small_clusters=2, greater_than=0,
        less_than=np.inf, qb_thr=5, progressive=False)

    # we can check the quality of registration by comparing the
    # MAM streamline distances before and after SLR
    D12 = bundles_distances_mam(f1, f2)
    D1M = bundles_distances_mam(f1, moved)

    d12_minsum = np.sum(np.min(D12, axis=0))
    d1m_minsum = np.sum(np.min(D1M, axis=0))

    assert_equal(d1m_minsum < d12_minsum, True)

    assert_array_almost_equal(transform[:3, 3], [-50, -0, -0], 3)

    # check rotation
    mat = compose_matrix44([0, 0, 0, 15, 0, 0])

    f3 = f.copy()
    f3 = transform_streamlines(f3, mat)

    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qb(
        f1, f3, verbose=False, rm_small_clusters=1, greater_than=20,
        less_than=np.inf, qb_thr=2, progressive=True)

    # we can also check the quality by looking at the decomposed transform
    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)

    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qb(
        f1, f3, verbose=False, rm_small_clusters=1, select_random=400,
        greater_than=20, less_than=np.inf, qb_thr=2, progressive=True)

    # we can also check the quality by looking at the decomposed transform
    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)
def test_rigid_parallel_lines():
    bundle_initial = simulated_bundle()
    bundle, shift = center_streamlines(bundle_initial)
    mat = compose_matrix44([20, 0, 10, 0, 40, 0])

    bundle2 = transform_streamlines(bundle, mat)
    bundle_sum_distance = BundleSumDistanceMatrixMetric()
    options = {'maxcor': 100, 'ftol': 1e-9, 'gtol': 1e-16, 'eps': 1e-3}
    srr = StreamlineLinearRegistration(metric=bundle_sum_distance,
                                       x0=np.zeros(6),
                                       method='L-BFGS-B',
                                       bounds=None,
                                       options=options)

    new_bundle2 = srr.optimize(bundle, bundle2).transform(bundle2)
    evaluate_convergence(bundle, new_bundle2)
def distance_tract_clustering_mean(x0, static, moving, kmeans, idx, beta,
                                   max_dist):
    affine = compose_matrix44(x0)
    moving = transform_streamlines(moving, affine)
    # con_static = np.concatenate(static)
    con_moving = np.concatenate(moving)
    cost = mdf_cost(static, moving)
    k = len(kmeans.cluster_centers_)
    clustering_cost = 0
    for i in range(k):
        clustering_cost += np.linalg.norm(kmeans.cluster_centers_[i] -
                                          np.mean(con_moving[idx[i]], axis=0))
    cost += beta * clustering_cost
    costs.append(cost)
    return cost
def test_affine_real_bundles():
    bundle_initial = fornix_streamlines()
    bundle_initial, shift = center_streamlines(bundle_initial)
    bundle = bundle_initial[:20]
    xgold = [0, 4, 2, 0, 10, 10, 1.2, 1.1, 1., 0., 0.2, 0.]
    mat = compose_matrix44(xgold)
    bundle2 = transform_streamlines(bundle_initial[:20], mat)

    x0 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1., 0, 0, 0])

    x = 25

    bounds = [(-x, x), (-x, x), (-x, x),
              (-x, x), (-x, x), (-x, x),
              (0.1, 1.5), (0.1, 1.5), (0.1, 1.5),
              (-1, 1), (-1, 1), (-1, 1)]

    options = {'maxcor': 10, 'ftol': 1e-7, 'gtol': 1e-5, 'eps': 1e-8}

    metric = BundleMinDistanceMatrixMetric()

    slr = StreamlineLinearRegistration(metric=metric,
                                       x0=x0,
                                       method='L-BFGS-B',
                                       bounds=bounds,
                                       verbose=True,
                                       options=options)
    slm = slr.optimize(bundle, bundle2)

    new_bundle2 = slm.transform(bundle2)

    slr2 = StreamlineLinearRegistration(metric=metric,
                                        x0=x0,
                                        method='Powell',
                                        bounds=None,
                                        verbose=True,
                                        options=None)

    slm2 = slr2.optimize(bundle, new_bundle2)

    new_bundle2 = slm2.transform(new_bundle2)

    evaluate_convergence(bundle, new_bundle2)
def test_stream_rigid():
    static = fornix_streamlines()[:20]
    moving = fornix_streamlines()[20:40]
    static_center, shift = center_streamlines(static)

    mat = compose_matrix44([0, 0, 0, 0, 40, 0])
    moving = transform_streamlines(moving, mat)

    srr = StreamlineLinearRegistration()
    sr_params = srr.optimize(static, moving)
    moved = transform_streamlines(moving, sr_params.matrix)

    srr = StreamlineLinearRegistration(verbose=True)
    srm = srr.optimize(static, moving)
    moved2 = transform_streamlines(moving, srm.matrix)
    moved3 = srm.transform(moving)

    assert_array_almost_equal(moved[0], moved2[0], decimal=3)
    assert_array_almost_equal(moved2[0], moved3[0], decimal=3)
def test_stream_rigid():
    static = fornix_streamlines()[:20]
    moving = fornix_streamlines()[20:40]
    center_streamlines(static)

    mat = compose_matrix44([0, 0, 0, 0, 40, 0])
    moving = transform_streamlines(moving, mat)

    srr = StreamlineLinearRegistration()
    sr_params = srr.optimize(static, moving)
    moved = transform_streamlines(moving, sr_params.matrix)

    srr = StreamlineLinearRegistration(verbose=True)
    srm = srr.optimize(static, moving)
    moved2 = transform_streamlines(moving, srm.matrix)
    moved3 = srm.transform(moving)

    assert_array_almost_equal(moved[0], moved2[0], decimal=3)
    assert_array_almost_equal(moved2[0], moved3[0], decimal=3)
def test_similarity_real_bundles():
    bundle_initial = fornix_streamlines()
    bundle_initial, shift = center_streamlines(bundle_initial)
    bundle = bundle_initial[:20]
    xgold = [0, 0, 10, 0, 0, 0, 1.5]
    mat = compose_matrix44(xgold)
    bundle2 = transform_streamlines(bundle_initial[:20], mat)

    metric = BundleMinDistanceMatrixMetric()
    x0 = np.array([0, 0, 0, 0, 0, 0, 1], 'f8')

    slr = StreamlineLinearRegistration(metric=metric,
                                       x0=x0,
                                       method='Powell',
                                       bounds=None,
                                       verbose=False)

    slm = slr.optimize(bundle, bundle2)
    new_bundle2 = slm.transform(bundle2)
    evaluate_convergence(bundle, new_bundle2)
def test_whole_brain_slr():
    fname = get_fnames('fornix')
    fornix = load_tractogram(fname, 'same',
                             bbox_valid_check=False).streamlines

    f = Streamlines(fornix)
    f1 = f.copy()
    f2 = f.copy()

    # check translation
    f2._data += np.array([50, 0, 0])

    moved, transform, qb_centroids1, qb_centroids2 = whole_brain_slr(
        f1, f2, x0='affine', verbose=True, rm_small_clusters=2,
        greater_than=0, less_than=np.inf,
        qbx_thr=[5, 2, 1], progressive=False)

    # we can check the quality of registration by comparing the
    # MAM streamline distances before and after SLR
    D12 = bundles_distances_mam(f1, f2)
    D1M = bundles_distances_mam(f1, moved)

    d12_minsum = np.sum(np.min(D12, axis=0))
    d1m_minsum = np.sum(np.min(D1M, axis=0))

    print("distances= ", d12_minsum, " ", d1m_minsum)

    assert_equal(d1m_minsum < d12_minsum, True)

    assert_array_almost_equal(transform[:3, 3], [-50, -0, -0], 2)

    # check rotation
    mat = compose_matrix44([0, 0, 0, 15, 0, 0])

    f3 = f.copy()
    f3 = transform_streamlines(f3, mat)

    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qbx(
        f1, f3, verbose=False, rm_small_clusters=1, greater_than=20,
        less_than=np.inf, qbx_thr=[2], progressive=True)

    # we can also check the quality by looking at the decomposed transform
    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)

    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qbx(
        f1, f3, verbose=False, rm_small_clusters=1, select_random=400,
        greater_than=20, less_than=np.inf, qbx_thr=[2], progressive=True)

    # we can also check the quality by looking at the decomposed transform
    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)
def registration_icp(static,
                     moving,
                     points=20,
                     pca=True,
                     maxiter=100000,
                     affine=[0, 0, 0, 0, 0, 0, 1],
                     clustering=None,
                     medoids=[0, 1, 2],
                     k=3,
                     beta=999,
                     max_dist=40,
                     dist='pc'):
    options = {
        'maxcor': 10,
        'ftol': 1e-7,
        'gtol': 1e-5,
        'eps': 1e-8,
        'maxiter': maxiter
    }
    # options1 = {'xtol': 1e-6, 'ftol': 1e-6, 'maxiter': 1e6}
    if pca:
        moving = pca_transform_norm(static, moving, max_dist)
    else:
        mean_m = np.mean(np.concatenate(moving), axis=0)
        mean_s = np.mean(np.concatenate(static), axis=0)
        moving = [i - mean_m + mean_s for i in moving]
    original_moving = moving.copy()
    static = set_number_of_points(static, points)
    moving = set_number_of_points(moving, points)
    if clustering == 'kmeans':
        kmeans = KMeans(k).fit(np.concatenate(moving))
        idx = {i: np.where(kmeans.labels_ == i)[0] for i in range(k)}
        # dist = Clustering().distance_pc_clustering_mean
        if dist == 'pc':
            dist_fun = distance_pc_clustering_mean
        else:
            dist_fun = distance_tract_clustering_mean
        args = (static, moving, kmeans, idx, beta, max_dist)
        print('kmeans')
    elif clustering == 'kmedoids':
        k_medoids = kmedoids(np.concatenate(moving), medoids)
        k_medoids.process()
        # dist = Clustering().distance_pc_clustering_medoids
        if dist == 'pc':
            dist_fun = distance_pc_clustering_medoids
        else:
            dist_fun = distance_tract_clustering_medoids
        args = (static, moving, k_medoids, beta, max_dist)
        print('kmedoids')
    else:
        if dist == 'pc':
            dist_fun = distance_pc
            args = (static, moving, beta, max_dist)
        else:
            dist_fun = distance_mdf
            args = (static, moving)
        print('Without Clustering')
    # Supported optimisers: 'L-BFGS-B', 'Powell'
    m = Optimizer(dist_fun,
                  affine,
                  args=args,
                  method='L-BFGS-B',
                  options=options)
    # m = Optimizer(dist_fun, affine, args=args, method='Powell', options=options1)
    m.print_summary()
    mat = compose_matrix44(m.xopt)
    return transform_streamlines(original_moving, mat)
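# A minimal usage sketch (hypothetical file paths, not part of the original
# module): register one bundle to another using the point-cloud distance with
# k-means clustering, then visualise the result.
# static = read_ply('../data/132118/m_ex_atr-left_shore.ply')
# moving = read_ply('../data/150019/m_ex_atr-left_shore.ply')
# moved = registration_icp(static, moving, points=20, clustering='kmeans',
#                          k=3, beta=999, max_dist=40, dist='pc')
# draw_bundles([static, moved], [[1, 0, 0], [0, 0, 1]])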
import numpy as np
from scipy import sparse
from dipy.align.streamlinear import compose_matrix44
from time import time
import matplotlib.pyplot as plt
from src.tractography.io import read_ply
from src.tractography.registration import register
from src.tractography.viz import draw_bundles

moving = read_ply('data/197348/m_ex_atr-right_shore.ply')
con_moving = np.concatenate(moving)
length = con_moving.shape[0]

# One small per-vertex affine (transposed 3x4 block) for every point.
affine1 = np.array([
    compose_matrix44([0, 0, 0, i / 1000, i / 1000, 0])[:3, :].T
    for i in range(length)
])
affine2 = np.vstack(affine1)
# affine1 = np.concatenate(affine1)

# Homogeneous coordinates for every vertex.
new_con_moving = np.ones((length, 4))
new_con_moving[:, :-1] = con_moving

# Sparse matrix with one 1x4 block per row, so each vertex is multiplied by
# its own 4x3 transposed affine in a single sparse product.
D = sparse.coo_matrix(
    (np.concatenate(new_con_moving),
     (np.repeat(np.arange(length), 4), np.arange(length * 4))),
    (length, length * 4)).tocsr()
new_con_mov = D.dot(affine2)
options = {
    'maxcor': 10,
    'ftol': 1e-7,
    'gtol': 1e-5,
    'eps': 1e-8,
    'maxiter': 1000
}
start = time()
m = Optimizer(distance_pc,
              x0,
              args=(static, pca_moving, 1, 50),
              method='L-BFGS-B',
              options=options)
end = time()
aff = compose_matrix44(m.xopt)
new_moving = transform_streamlines(pca_moving, aff)
draw_bundles([new_moving, static], [[0, 0, 1], [1, 0, 0]])
''' Build KDTree '''
kdtree = KDTree(np.concatenate(static))
distances = kdtree.query(np.concatenate(new_moving), k=1)[0]
hours = int((end - start) / 3600)
minutes = int(((end - start) % 3600) / 60)
seconds = int(((end - start) % 3600) % 60)
print("Duration: {:02}:{:02}:{:02}".format(hours, minutes, seconds))
''' Get the threshold '''
max_range = max(distances)
plt.hist(distances, bins='auto', range=(0, max_range))
plt.title("Non Linear Method (optimizer) | Duration: {:02}:{:02}:{:02}".format(
    hours, minutes, seconds) +
from src.tractography.viz import draw_bundles
from os import listdir  # , mkdir
from os.path import isfile  # , isdir
from src.tractography.io import read_ply
import argparse
from dipy.align.streamlinear import compose_matrix44
from dipy.tracking.streamline import transform_streamlines

parser = argparse.ArgumentParser(description='Input argument parser.')
parser.add_argument('-f', type=str, help='location of files')
args = parser.parse_args()

data_path = '../data/132118/'
# data_path = args.f
files = [
    data_path + f for f in listdir(data_path)
    if isfile(data_path + f) and f.endswith('.ply')
]
mat = compose_matrix44([0, 0, 0, 0, 90, 90])
brain = []
for name in files:
    brain.append(transform_streamlines(read_ply(name), mat))
draw_bundles(brain, rotate=True)
"""
data1 = read_ply('../data/132118/m_ex_atr-left_shore.ply')
data2 = read_ply('../data/132118/m_ex_atr-right_shore.ply')
draw_bundles([data1, data2])
"""