def test_unique_edges():
    """unique_edges must report each undirected edge of the mesh once.

    With ``return_mapping=True`` the returned index array must map the
    unique edges back onto the per-face edge lists.
    """
    faces = np.array([[0, 1, 2], [1, 2, 0]])
    expected = array_to_set([[1, 2], [0, 1], [0, 2]])

    # Plain call: only the deduplicated edge array.
    result = unique_edges(faces)
    nt.assert_equal(expected, array_to_set(result))

    # With the mapping: same unique edges, plus indices that rebuild the
    # face-wise edges (compared up to within-edge vertex order via sort).
    result, mapping = unique_edges(faces, return_mapping=True)
    nt.assert_equal(expected, array_to_set(result))
    per_face_edges = [[[0, 1], [1, 2], [2, 0]],
                      [[1, 2], [2, 0], [0, 1]]]
    nt.assert_equal(np.sort(result[mapping], -1),
                    np.sort(per_face_edges, -1))
def test_dni_eit():
    """Run DiffusionNablaModel and EquatorialInversionModel on simulated
    data and check the detected peak counts of the latter.
    """
    btable = np.loadtxt(get_data('dsi515btable'))
    bvals = btable[:, 0]
    bvecs = btable[:, 1:]
    data, descr = sim_data(bvals, bvecs)
    # load odf sphere
    vertices, faces = sphere_vf_from('symmetric724')
    edges = unique_edges(faces)
    # create the sphere
    odf_sphere = (vertices, faces)

    dn = DiffusionNablaModel(bvals, bvecs, odf_sphere)
    dn.relative_peak_threshold = 0.5
    dn.angular_distance_threshold = 20
    dnfit = dn.fit(data)
    print('DiffusionNablaModel')
    for idx, _ in enumerate(data):
        print(descr[idx], np.sum(dnfit.peak_values[idx] > 0))

    ei = EquatorialInversionModel(bvals, bvecs, odf_sphere)
    ei.relative_peak_threshold = 0.3
    ei.angular_distance_threshold = 15
    ei.set_operator('laplacian')
    eifit = ei.fit(data, return_odf=True)
    print('EquatorialInversionModel')
    for idx, _ in enumerate(data):
        print(descr[idx], np.sum(eifit.peak_values[idx] > 0))
        # presumably descr[idx][1] is the simulated number of fiber
        # directions -- TODO confirm against sim_data
        assert_equal(descr[idx][1], np.sum(eifit.peak_values[idx] > 0))
def test_local_maxima():
    """local_maxima must return peaks sorted by descending value, both on
    the full sphere and on a hemisphere, and reject NaN input.
    """
    sphere = get_sphere('symmetric724')
    vertices, faces = sphere.vertices, sphere.faces
    edges = unique_edges(faces)

    # Full sphere: plant three artificial peaks on top of a smooth field.
    odf = abs(vertices.sum(-1))
    odf[1] = 10.
    odf[143] = 143.
    odf[505] = 505
    peak_values, peak_index = local_maxima(odf, edges)
    npt.assert_array_equal(peak_values, [505, 143, 10])
    npt.assert_array_equal(peak_index, [505, 143, 1])

    # Hemisphere: same idea with the reduced vertex/edge sets.
    hemisphere = HemiSphere(xyz=vertices, faces=faces)
    vertices_half, edges_half = hemisphere.vertices, hemisphere.edges
    odf = abs(vertices_half.sum(-1))
    odf[1] = 10.
    odf[143] = 143.
    peak_value, peak_index = local_maxima(odf, edges_half)
    npt.assert_array_equal(peak_value, [143, 10])
    npt.assert_array_equal(peak_index, [143, 1])

    # A NaN anywhere in the odf must raise.
    odf[20] = np.nan
    npt.assert_raises(ValueError, local_maxima, odf, edges_half)
def test_dni_eit():
    """Fit the two equatorial-inversion-family models on simulated data
    and verify EquatorialInversionModel's per-sample peak counts.

    NOTE(review): this re-defines ``test_dni_eit`` declared earlier in
    this file; only the later definition will be collected -- confirm
    which copy is intended to survive.
    """
    btable = np.loadtxt(get_data('dsi515btable'))
    bvals = btable[:, 0]
    bvecs = btable[:, 1:]
    data, descr = sim_data(bvals, bvecs)
    # load odf sphere
    vertices, faces = sphere_vf_from('symmetric724')
    edges = unique_edges(faces)
    # create the sphere
    odf_sphere = (vertices, faces)

    dn = DiffusionNablaModel(bvals, bvecs, odf_sphere)
    dn.relative_peak_threshold = 0.5
    dn.angular_distance_threshold = 20
    dnfit = dn.fit(data)
    print('DiffusionNablaModel')
    for k, _sample in enumerate(data):
        print(descr[k], np.sum(dnfit.peak_values[k] > 0))

    ei = EquatorialInversionModel(bvals, bvecs, odf_sphere)
    ei.relative_peak_threshold = 0.3
    ei.angular_distance_threshold = 15
    ei.set_operator('laplacian')
    eifit = ei.fit(data, return_odf=True)
    print('EquatorialInversionModel')
    for k, _sample in enumerate(data):
        print(descr[k], np.sum(eifit.peak_values[k] > 0))
        # presumably descr[k][1] holds the expected peak count -- verify
        assert_equal(descr[k][1], np.sum(eifit.peak_values[k] > 0))
def bench_local_maxima():
    """Time local_maxima on a realistic odf and on a sparse-peak odf.

    The locals ``odf`` and ``edges`` are referenced by name inside the
    string handed to ``measure`` (evaluated in this frame), so they must
    keep exactly those names.
    """
    repeat = 10000
    sphere = get_sphere('symmetric724')
    vertices, faces = sphere.vertices, sphere.faces
    edges = unique_edges(faces)
    odf = abs(vertices.sum(-1))

    print('Timing peak finding')
    timed0 = measure("local_maxima(odf, edges)", repeat)
    print('Actual sphere: %0.2f' % timed0)

    # Create an artificial odf with a few peaks
    odf = np.zeros(len(vertices))
    odf[1] = 1.
    odf[143] = 143.
    odf[505] = 505.
    timed1 = measure("local_maxima(odf, edges)", repeat)
    print('Few-peak sphere: %0.2f' % timed1)
def bench_local_maxima():
    """Benchmark local_maxima on a dense odf and a three-peak odf.

    NOTE(review): duplicates the ``bench_local_maxima`` defined earlier
    in this file -- the later definition shadows the earlier one.
    ``measure`` evaluates its string argument in this frame, so the
    names ``odf`` and ``edges`` are load-bearing and kept as-is.
    """
    repeat = 10000
    sphere = get_sphere("symmetric724")
    vertices, faces = sphere.vertices, sphere.faces
    edges = unique_edges(faces)
    odf = abs(vertices.sum(-1))

    print("Timing peak finding")
    timed0 = measure("local_maxima(odf, edges)", repeat)
    print("Actual sphere: %0.2f" % timed0)

    # Create an artificial odf with a few peaks
    odf = np.zeros(len(vertices))
    odf[1] = 1.0
    odf[143] = 143.0
    odf[505] = 505.0
    timed1 = measure("local_maxima(odf, edges)", repeat)
    print("Few-peak sphere: %0.2f" % timed1)
def test_local_maxima():
    """Exercise local_maxima on sphere and hemisphere meshes.

    Covers: first peak equals the global maximum, descending peak order,
    adjacent vertices that are both peaks, NaN rejection, and
    out-of-range edge indices.
    """
    sphere = get_sphere('symmetric724')
    vertices, faces = sphere.vertices, sphere.faces
    edges = unique_edges(faces)

    # Check that the first peak is == max(odf)
    odf = abs(vertices.sum(-1))
    pk_values, pk_index = local_maxima(odf, edges)
    npt.assert_equal(max(odf), pk_values[0])
    npt.assert_equal(max(odf), odf[pk_index[0]])

    # Create an artificial odf with a few peaks
    odf = np.zeros(len(vertices))
    odf[1] = 1.
    odf[143] = 143.
    odf[505] = 505.
    pk_values, pk_index = local_maxima(odf, edges)
    npt.assert_array_equal(pk_values, [505, 143, 1])
    npt.assert_array_equal(pk_index, [505, 143, 1])

    # Check that neighboring points can both be peaks
    odf = np.zeros(len(vertices))
    point1, point2 = edges[0]
    odf[[point1, point2]] = 1.
    pk_values, pk_index = local_maxima(odf, edges)
    npt.assert_array_equal(pk_values, [1., 1.])
    npt.assert_(point1 in pk_index)
    npt.assert_(point2 in pk_index)

    # Repeat with a hemisphere
    hemisphere = HemiSphere(xyz=vertices, faces=faces)
    vertices, edges = hemisphere.vertices, hemisphere.edges

    # Check that the first peak is == max(odf)
    odf = abs(vertices.sum(-1))
    pk_values, pk_index = local_maxima(odf, edges)
    npt.assert_equal(max(odf), pk_values[0])
    npt.assert_equal(max(odf), odf[pk_index[0]])

    # Create an artificial odf with a few peaks
    odf = np.zeros(len(vertices))
    odf[1] = 1.
    odf[143] = 143.
    odf[300] = 300.
    pk_value, pk_index = local_maxima(odf, edges)
    npt.assert_array_equal(pk_value, [300, 143, 1])
    npt.assert_array_equal(pk_index, [300, 143, 1])

    # Check that neighboring points can both be peaks
    odf = np.zeros(len(vertices))
    point1, point2 = edges[0]
    odf[[point1, point2]] = 1.
    pk_values, pk_index = local_maxima(odf, edges)
    npt.assert_array_equal(pk_values, [1., 1.])
    npt.assert_(point1 in pk_index)
    npt.assert_(point2 in pk_index)

    # Should raise an error if odf has nans
    odf[20] = np.nan
    npt.assert_raises(ValueError, local_maxima, odf, edges)

    # Should raise an error if edge values are too large to index odf
    edges[0, 0] = 9999
    odf[20] = 0
    npt.assert_raises(IndexError, local_maxima, odf, edges)
def test_local_maxima():
    """Exercise local_maxima on the default sphere and its hemisphere.

    NOTE(review): this re-defines ``test_local_maxima`` declared earlier
    in this file; only the last definition is collected by the runner.
    Covers ordering of returned peaks, adjacent co-equal peaks, NaN
    rejection, and out-of-range edge indices.
    """
    sphere = default_sphere
    vertices, faces = sphere.vertices, sphere.faces
    edges = unique_edges(faces)

    # Check that the first peak is == max(odf)
    odf = abs(vertices.sum(-1))
    values, indices = local_maxima(odf, edges)
    npt.assert_equal(max(odf), values[0])
    npt.assert_equal(max(odf), odf[indices[0]])

    # Create an artificial odf with a few peaks
    odf = np.zeros(len(vertices))
    odf[1] = 1.
    odf[143] = 143.
    odf[361] = 361.
    values, indices = local_maxima(odf, edges)
    npt.assert_array_equal(values, [361, 143, 1])
    npt.assert_array_equal(indices, [361, 143, 1])

    # Check that neighboring points can both be peaks
    odf = np.zeros(len(vertices))
    point1, point2 = edges[0]
    odf[[point1, point2]] = 1.
    values, indices = local_maxima(odf, edges)
    npt.assert_array_equal(values, [1., 1.])
    npt.assert_(point1 in indices)
    npt.assert_(point2 in indices)

    # Repeat with a hemisphere
    hemisphere = HemiSphere(xyz=vertices, faces=faces)
    vertices, edges = hemisphere.vertices, hemisphere.edges

    # Check that the first peak is == max(odf)
    odf = abs(vertices.sum(-1))
    values, indices = local_maxima(odf, edges)
    npt.assert_equal(max(odf), values[0])
    npt.assert_equal(max(odf), odf[indices[0]])

    # Create an artificial odf with a few peaks
    odf = np.zeros(len(vertices))
    odf[1] = 1.
    odf[143] = 143.
    odf[300] = 300.
    value, indices = local_maxima(odf, edges)
    npt.assert_array_equal(value, [300, 143, 1])
    npt.assert_array_equal(indices, [300, 143, 1])

    # Check that neighboring points can both be peaks
    odf = np.zeros(len(vertices))
    point1, point2 = edges[0]
    odf[[point1, point2]] = 1.
    values, indices = local_maxima(odf, edges)
    npt.assert_array_equal(values, [1., 1.])
    npt.assert_(point1 in indices)
    npt.assert_(point2 in indices)

    # Should raise an error if odf has nans
    odf[20] = np.nan
    npt.assert_raises(ValueError, local_maxima, odf, edges)

    # Should raise an error if edge values are too large to index odf
    edges[0, 0] = 9999
    odf[20] = 0
    npt.assert_raises(IndexError, local_maxima, odf, edges)