def test_sph_harm_ind_list(): m_list, n_list = sph_harm_ind_list(8) assert_equal(m_list.shape, n_list.shape) assert_equal(m_list.shape, (45,)) assert_true(np.all(np.abs(m_list) <= n_list)) assert_array_equal(n_list % 2, 0) assert_raises(ValueError, sph_harm_ind_list, 1)
def test_flirt2aff():
    from os.path import join as pjoin
    from dipy.testing import assert_true
    import scipy.ndimage as ndi
    import nibabel as nib
    # Example of the hard-coded inputs this test expects (kept for reference):
    # matfile = pjoin('fa_data',
    #                 '1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_affine_transf.mat')
    # in_fname = pjoin('fa_data',
    #                  '1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_bet_FA.nii.gz')
    # `flirtaff` and `ffa` are expected to be defined at module level (paths to
    # the FLIRT .mat file and the FA image); the reference image is the
    # standard FSL FA template, so this test only runs on a machine with FSL.
    matfile = flirtaff
    in_fname = ffa
    ref_fname = '/usr/share/fsl/data/standard/FMRIB58_FA_1mm.nii.gz'
    res = flirt2aff_files(matfile, in_fname, ref_fname)
    mat = np.loadtxt(matfile)
    in_img = nib.load(in_fname)
    ref_img = nib.load(ref_fname)
    assert_true(np.all(res == flirt2aff(mat, in_img, ref_img)))
    # mm to mm transform
    # mm_in2mm_ref = np.dot(ref_img.affine,
    #                       np.dot(res, npl.inv(in_img.affine)))
    # Make a new input image resliced onto the reference grid.
    in_data = in_img.get_data()
    ires = npl.inv(res)
    in_data[np.isnan(in_data)] = 0
    resliced_data = ndi.affine_transform(in_data, ires[:3, :3], ires[:3, 3],
                                         ref_img.shape)
    resliced_img = nib.Nifti1Image(resliced_data, ref_img.affine)
    nib.save(resliced_img, 'test.nii')
def test_cluster_map_getitem():
    nb_clusters = 11
    indices = list(range(nb_clusters))
    np.random.shuffle(indices)  # Non-trivial ordering
    advanced_indices = indices + [0, 1, 2, -1, -2, -3]

    cluster_map = ClusterMap()
    clusters = []
    for i in range(nb_clusters):
        new_cluster = Cluster(indices=range(i))
        cluster_map.add_cluster(new_cluster)
        clusters.append(new_cluster)

    # Test indexing
    for i in advanced_indices:
        assert_true(cluster_map[i] == clusters[i])

    # Test advanced indexing
    assert_arrays_equal(cluster_map[advanced_indices],
                        [clusters[i] for i in advanced_indices])

    # Test index out of bounds
    assert_raises(IndexError, cluster_map.__getitem__, len(clusters))
    assert_raises(IndexError, cluster_map.__getitem__, -len(clusters) - 1)

    # Test slicing and negative indexing
    assert_equal(cluster_map[-1], clusters[-1])
    assert_array_equal(np.array(cluster_map[::2], dtype=object),
                       np.array(clusters[::2], dtype=object))
    assert_arrays_equal(cluster_map[::-1], clusters[::-1])
    assert_arrays_equal(cluster_map[:-1], clusters[:-1])
    assert_arrays_equal(cluster_map[1:], clusters[1:])
def test_cluster_map_centroid_iter(): rng = np.random.RandomState(42) nb_clusters = 11 cluster_map = ClusterMapCentroid() clusters = [] for i in range(nb_clusters): new_centroid = np.zeros_like(features) new_cluster = ClusterCentroid(new_centroid, indices=rng.randint(0, len(data), size=10)) cluster_map.add_cluster(new_cluster) clusters.append(new_cluster) assert_true( all([c1 is c2 for c1, c2 in zip(cluster_map.clusters, clusters)])) assert_array_equal(cluster_map, clusters) assert_array_equal(cluster_map.clusters, clusters) assert_array_equal(cluster_map, [cluster.indices for cluster in clusters]) # Set refdata cluster_map.refdata = data for c1, c2 in zip(cluster_map, clusters): assert_arrays_equal(c1, [data[i] for i in c2.indices])
def test_cluster_map_centroid_getitem():
    nb_clusters = 11
    indices = list(range(len(data)))
    np.random.shuffle(indices)  # Non-trivial ordering
    advanced_indices = indices + [0, 1, 2, -1, -2, -3]

    cluster_map = ClusterMapCentroid()
    clusters = []
    for i in range(nb_clusters):
        centroid = np.zeros_like(features)
        cluster = ClusterCentroid(centroid)
        cluster.id = cluster_map.add_cluster(cluster)
        clusters.append(cluster)

    # Test indexing
    for i in advanced_indices:
        assert_true(cluster_map[i] == clusters[i])

    # Test advanced indexing
    assert_arrays_equal(cluster_map[advanced_indices],
                        [clusters[i] for i in advanced_indices])

    # Test index out of bounds
    assert_raises(IndexError, cluster_map.__getitem__, len(clusters))
    assert_raises(IndexError, cluster_map.__getitem__, -len(clusters) - 1)

    # Test slicing and negative indexing
    assert_true(cluster_map[-1] == clusters[-1])
    assert_array_equal(cluster_map[::2], clusters[::2])
    assert_arrays_equal(cluster_map[::-1], clusters[::-1])
    assert_arrays_equal(cluster_map[:-1], clusters[:-1])
    assert_arrays_equal(cluster_map[1:], clusters[1:])
def test_feature_center_of_mass(): # Test subclassing Feature class CenterOfMassFeature(dipymetric.Feature): def __init__(self): super(CenterOfMassFeature, self).__init__(is_order_invariant=True) def infer_shape(self, streamline): return (1, streamline.shape[1]) def extract(self, streamline): return np.mean(streamline, axis=0)[None, :] for feature in [dipymetric.CenterOfMassFeature(), CenterOfMassFeature()]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (1, s.shape[1])) # Test method extract features = feature.extract(s) assert_equal(features.shape, (1, s.shape[1])) assert_array_almost_equal(features, np.mean(s, axis=0)[None, :]) # This feature type is order invariant assert_true(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) assert_array_almost_equal(features, features_flip)
def test_cluster_map_iter(): rng = np.random.RandomState(42) nb_clusters = 11 # Test without specifying refdata in ClusterMap cluster_map = ClusterMap() clusters = [] for i in range(nb_clusters): new_cluster = Cluster(indices=rng.randint(0, len(data), size=10)) cluster_map.add_cluster(new_cluster) clusters.append(new_cluster) assert_true(all([c1 is c2 for c1, c2 in zip(cluster_map.clusters, clusters)])) assert_array_equal(cluster_map, clusters) assert_array_equal(cluster_map.clusters, clusters) assert_array_equal(cluster_map, [cluster.indices for cluster in clusters]) # Set refdata cluster_map.refdata = data for c1, c2 in zip(cluster_map, clusters): assert_arrays_equal(c1, [data[i] for i in c2.indices]) # Remove refdata, i.e. back to indices cluster_map.refdata = None assert_array_equal(cluster_map, [cluster.indices for cluster in clusters])
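# A minimal usage sketch (not itself a test) of the refdata behaviour checked
# above: with refdata unset a cluster yields integer indices, and once
# cluster_map.refdata is assigned the same indices dereference into the actual
# data elements. The toy `points` list is hypothetical; Cluster and ClusterMap
# are the classes already imported by this module.
def _refdata_sketch():
    points = [np.array([i, i, i], dtype='f4') for i in range(5)]
    cluster_map = ClusterMap()
    cluster_map.add_cluster(Cluster(indices=[0, 2, 4]))
    # Without refdata: plain indices.
    np.testing.assert_array_equal(list(cluster_map[0]), [0, 2, 4])
    # With refdata: the same indices resolve to the underlying points.
    cluster_map.refdata = points
    for point, index in zip(cluster_map[0], [0, 2, 4]):
        np.testing.assert_array_equal(point, points[index])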
def test_identity_feature(): # Test subclassing Feature class IdentityFeature(dipymetric.Feature): def __init__(self): super(IdentityFeature, self).__init__(is_order_invariant=False) def infer_shape(self, streamline): return streamline.shape def extract(self, streamline): return streamline for feature in [dipymetric.IdentityFeature(), IdentityFeature()]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), s.shape) # Test method extract features = feature.extract(s) assert_equal(features.shape, s.shape) assert_array_equal(features, s) # This feature type is not order invariant assert_false(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) assert_array_equal(features_flip, s[::-1]) assert_true(np.any(np.not_equal(features, features_flip)))
def test_cluster_map_centroid_add_cluster(): clusters = ClusterMapCentroid() centroids = [] for i in range(3): cluster = ClusterCentroid(centroid=np.zeros_like(features)) centroids.append(np.zeros_like(features)) for id_data in range(2 * i): centroids[-1] = ((centroids[-1] * id_data + (id_data + 1) * features) / (id_data + 1)) cluster.assign(id_data, (id_data + 1) * features) cluster.update() clusters.add_cluster(cluster) assert_array_equal(cluster.centroid, centroids[-1]) assert_equal(type(cluster), ClusterCentroid) assert_true(cluster == clusters[-1]) assert_equal(type(clusters.centroids), list) assert_array_equal(list(itertools.chain(*clusters.centroids)), list(itertools.chain(*centroids))) # Check adding features of different sizes (shorter and longer) features_shape_short = (1, features_shape[1] - 3) features_too_short = np.ones(features_shape_short, dtype=dtype) assert_raises(ValueError, cluster.assign, 123, features_too_short) features_shape_long = (1, features_shape[1] + 3) features_too_long = np.ones(features_shape_long, dtype=dtype) assert_raises(ValueError, cluster.assign, 123, features_too_long)
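# Hedged sketch of the centroid bookkeeping asserted above: after a series of
# assign()/update() calls the centroid should equal the plain mean of every
# feature vector assigned so far. The (1, 3) feature vectors are hypothetical.
def _centroid_running_mean_sketch():
    cluster = ClusterCentroid(centroid=np.zeros((1, 3), dtype='f4'))
    assigned = []
    for idx in range(5):
        feat = np.full((1, 3), float(idx + 1), dtype='f4')
        assigned.append(feat)
        cluster.assign(idx, feat)
        cluster.update()
        # The centroid tracks the running mean of the assigned features.
        np.testing.assert_array_almost_equal(cluster.centroid,
                                             np.mean(assigned, axis=0))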
def test_feature_midpoint(): # Test subclassing Feature class MidpointFeature(dipymetric.Feature): def __init__(self): super(MidpointFeature, self).__init__(is_order_invariant=False) def infer_shape(self, streamline): return (1, streamline.shape[1]) def extract(self, streamline): return streamline[[len(streamline)//2]] for feature in [dipymetric.MidpointFeature(), MidpointFeature()]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (1, s.shape[1])) # Test method extract features = feature.extract(s) assert_equal(features.shape, (1, s.shape[1])) assert_array_almost_equal(features, s[len(s)//2][None, :]) # This feature type is not order invariant assert_false(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) if len(s) % 2 == 0: assert_true(np.any(np.not_equal(features, features_flip))) else: assert_array_equal(features, features_flip)
def test_feature_arclength(): from dipy.tracking.streamline import length # Test subclassing Feature class ArcLengthFeature(dipymetric.Feature): def __init__(self): super(ArcLengthFeature, self).__init__(is_order_invariant=True) def infer_shape(self, streamline): return (1, 1) def extract(self, streamline): return length(streamline)[None, None] for feature in [dipymetric.ArcLengthFeature(), ArcLengthFeature()]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (1, 1)) # Test method extract features = feature.extract(s) assert_equal(features.shape, (1, 1)) assert_array_almost_equal(features, length(s)[None, None]) # This feature type is order invariant assert_true(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) assert_array_almost_equal(features, features_flip)
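# Small sketch tying ArcLengthFeature back to its defining sum: the arc length
# of a streamline is the sum of the Euclidean norms of its consecutive
# segments, which is what dipy.tracking.streamline.length computes. The toy
# streamline below is hypothetical.
def _arc_length_sketch():
    from dipy.tracking.streamline import length
    streamline = np.array([[0, 0, 0],
                           [1, 0, 0],
                           [1, 2, 0]], dtype='f4')
    segment_lengths = np.sqrt(np.sum(np.diff(streamline, axis=0) ** 2, axis=1))
    np.testing.assert_almost_equal(np.sum(segment_lengths), 3.0)  # 1 + 2
    np.testing.assert_almost_equal(length(streamline), np.sum(segment_lengths))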
def test_gibbs_flow(): def generate_slice(): Nori = 32 image = np.zeros((6 * Nori, 6 * Nori)) image[Nori: 2 * Nori, Nori: 2 * Nori] = 1 image[Nori: 2 * Nori, 4 * Nori: 5 * Nori] = 1 image[2 * Nori: 3 * Nori, Nori: 3 * Nori] = 1 image[3 * Nori: 4 * Nori, 2 * Nori: 3 * Nori] = 2 image[3 * Nori: 4 * Nori, 4 * Nori: 5 * Nori] = 1 image[4 * Nori: 5 * Nori, 3 * Nori: 5 * Nori] = 3 # Corrupt image with gibbs ringing c = np.fft.fft2(image) c = np.fft.fftshift(c) c_crop = c[48:144, 48:144] image_gibbs = abs(np.fft.ifft2(c_crop)/4) return image_gibbs with TemporaryDirectory() as out_dir: image4d = np.zeros((96, 96, 2, 2)) image4d[:, :, 0, 0] = generate_slice() image4d[:, :, 1, 0] = generate_slice() image4d[:, :, 0, 1] = generate_slice() image4d[:, :, 1, 1] = generate_slice() data_path = os.path.join(out_dir, "random_noise.nii.gz") save_nifti(data_path, image4d, np.eye(4)) gibbs_flow = GibbsRingingFlow() gibbs_flow.run(data_path, out_dir=out_dir) assert_true(os.path.isfile( gibbs_flow.last_generated_outputs['out_unring']))
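# A minimal numpy-only sketch of the trick used by generate_slice() above:
# truncating the centred k-space of a piecewise constant image and
# transforming back introduces Gibbs ringing at the edges, which is exactly
# the artefact GibbsRingingFlow is asked to remove. Sizes here are arbitrary.
def _gibbs_ringing_sketch():
    image = np.zeros((64, 64))
    image[16:48, 16:48] = 1.0                      # sharp-edged square
    k_space = np.fft.fftshift(np.fft.fft2(image))  # centred k-space
    k_crop = k_space[16:48, 16:48]                 # keep only low frequencies
    ringing = np.abs(np.fft.ifft2(k_crop)) / 4     # /4 offsets the 2x2 crop
    # The reconstruction is no longer piecewise constant: intensities now
    # oscillate (ring) around the 0 and 1 plateaus near the edges.
    assert np.unique(np.round(ringing, 2)).size > 2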
def test_bundle_shape_analysis_flow(): with TemporaryDirectory() as dirpath: data_path = get_fnames('fornix') fornix = load_tractogram(data_path, 'same', bbox_valid_check=False).streamlines f = Streamlines(fornix) mb = os.path.join(dirpath, "model_bundles") sub = os.path.join(dirpath, "subjects") os.mkdir(mb) sft = StatefulTractogram(f, data_path, Space.RASMM) save_tractogram(sft, os.path.join(mb, "temp.trk"), bbox_valid_check=False) os.mkdir(sub) os.mkdir(os.path.join(sub, "patient")) os.mkdir(os.path.join(sub, "control")) p = os.path.join(sub, "patient", "10001") os.mkdir(p) c = os.path.join(sub, "control", "20002") os.mkdir(c) for pre in [p, c]: os.mkdir(os.path.join(pre, "rec_bundles")) sft = StatefulTractogram(f, data_path, Space.RASMM) save_tractogram(sft, os.path.join(pre, "rec_bundles", "temp.trk"), bbox_valid_check=False) os.mkdir(os.path.join(pre, "org_bundles")) sft = StatefulTractogram(f, data_path, Space.RASMM) save_tractogram(sft, os.path.join(pre, "org_bundles", "temp.trk"), bbox_valid_check=False) os.mkdir(os.path.join(pre, "anatomical_measures")) fa = np.random.rand(255, 255, 255) save_nifti(os.path.join(pre, "anatomical_measures", "fa.nii.gz"), fa, affine=np.eye(4)) out_dir = os.path.join(dirpath, "output") os.mkdir(out_dir) sm_flow = BundleShapeAnalysis() sm_flow.run(sub, out_dir=out_dir) assert_true(os.path.exists(os.path.join(out_dir, "temp.npy")))
def test_cluster_map_add_cluster(): clusters = ClusterMap() list_of_cluster_objects = [] list_of_indices = [] for i in range(3): cluster = Cluster() list_of_cluster_objects.append(cluster) list_of_indices.append([]) for id_data in range(2 * i): list_of_indices[-1].append(id_data) cluster.assign(id_data) clusters.add_cluster(cluster) assert_equal(type(cluster), Cluster) assert_equal(len(clusters), i + 1) assert_true(cluster == clusters[-1]) assert_array_equal(list(itertools.chain(*clusters)), list(itertools.chain(*list_of_indices))) # Test adding multiple clusters at once. clusters = ClusterMap() clusters.add_cluster(*list_of_cluster_objects) assert_array_equal(list(itertools.chain(*clusters)), list(itertools.chain(*list_of_indices)))
def test_linear_mixed_models_flow(): with TemporaryDirectory() as dirpath: out_dir = os.path.join(dirpath, "output") os.mkdir(out_dir) d = {'bundle': ["temp"]*100, 'disk#': [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]*10, 'fa': [0.21, 0.234, 0.44, 0.44, 0.5, 0.23, 0.55, 0.34, 0.76, 0.34]*10, 'subject': ["10001", "10001", "10001", "10001", "10001", "20002", "20002", "20002", "20002", "20002"]*10, 'group': ["control", "control", "control", "control", "control", "patient", "patient", "patient", "patient", "patient"]*10} df = pd.DataFrame(data=d) store = pd.HDFStore(os.path.join(out_dir, 'fa.h5')) store.append('fa', df, data_columns=True) store.close() lmm_flow = LinearMixedModelsFlow() out_dir2 = os.path.join(dirpath, "output2") os.mkdir(out_dir2) input_path = os.path.join(out_dir, "*") lmm_flow.run(input_path, no_disks=5, out_dir=out_dir2) assert_true(os.path.exists(os.path.join(out_dir2, 'temp_fa.png'))) # test error d2 = {'bundle': ["temp"]*10, 'disk#': [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]*1, 'fa': [0.21, 0.234, 0.44, 0.44, 0.5, 0.23, 0.55, 0.34, 0.76, 0.34]*1, 'subject': ["10001", "10001", "10001", "10001", "10001", "20002", "20002", "20002", "20002", "20002"]*1, 'group': ["control", "control", "control", "control", "control", "patient", "patient", "patient", "patient", "patient"]*1} df = pd.DataFrame(data=d2) out_dir3 = os.path.join(dirpath, "output3") os.mkdir(out_dir3) store = pd.HDFStore(os.path.join(out_dir3, 'fa.h5')) store.append('fa', df, data_columns=True) store.close() out_dir4 = os.path.join(dirpath, "output4") os.mkdir(out_dir4) input_path = os.path.join(out_dir3, "*") assert_raises(ValueError, lmm_flow.run, input_path, no_disks=5, out_dir=out_dir4)
def test_patch2self_flow(): with TemporaryDirectory() as out_dir: data_path, fbvals, fbvecs = get_fnames() patch2self_flow = Patch2SelfFlow() patch2self_flow.run(data_path, fbvals, out_dir=out_dir) assert_true(os.path.isfile( patch2self_flow.last_generated_outputs['out_denoised']))
def test_lpca_flow(): with TemporaryDirectory() as out_dir: data_path, fbvals, fbvecs = get_fnames() lpca_flow = LPCAFlow() lpca_flow.run(data_path, fbvals, fbvecs, out_dir=out_dir) assert_true(os.path.isfile( lpca_flow.last_generated_outputs['out_denoised']))
def test_ba(): with TemporaryDirectory() as dirpath: data_path = get_fnames('fornix') fornix = load_tractogram(data_path, 'same', bbox_valid_check=False).streamlines f = Streamlines(fornix) mb = os.path.join(dirpath, "model_bundles") os.mkdir(mb) sft = StatefulTractogram(f, data_path, Space.RASMM) save_tractogram(sft, os.path.join(mb, "temp.trk"), bbox_valid_check=False) rb = os.path.join(dirpath, "rec_bundles") os.mkdir(rb) sft = StatefulTractogram(f, data_path, Space.RASMM) save_tractogram(sft, os.path.join(rb, "temp.trk"), bbox_valid_check=False) ob = os.path.join(dirpath, "org_bundles") os.mkdir(ob) sft = StatefulTractogram(f, data_path, Space.RASMM) save_tractogram(sft, os.path.join(ob, "temp.trk"), bbox_valid_check=False) dt = os.path.join(dirpath, "dti_measures") os.mkdir(dt) fa = np.random.rand(255, 255, 255) save_nifti(os.path.join(dt, "fa.nii.gz"), fa, affine=np.eye(4)) out_dir = os.path.join(dirpath, "output") os.mkdir(out_dir) bundle_analysis(mb, rb, ob, dt, group="patient", subject="10001", no_disks=100, out_dir=out_dir) assert_true(os.path.exists(os.path.join(out_dir, 'fa.h5')))
def test_gen_dirac(): with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=descoteaux07_legacy_msg, category=PendingDeprecationWarning) sh = gen_dirac(np.array([0]), np.array([0]), np.array([0]), np.array([0])) assert_true(np.abs(sh[0] - 1.0 / np.sqrt(4.0 * np.pi)) < 0.0001)
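# The constant checked above comes from the l = 0 spherical harmonic:
# Y_0^0 = 1 / sqrt(4*pi) ~= 0.2821, so the first SH coefficient of a unit
# Dirac on the sphere should be close to that value. Quick numeric check:
def _dirac_sh0_sketch():
    y00 = 1.0 / np.sqrt(4.0 * np.pi)
    assert abs(y00 - 0.2820947918) < 1e-9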
def test_as_native(): arr = np.arange(5) # native assert_equal(arr.dtype.byteorder, '=') narr = as_native_array(arr) assert_true(arr is narr) sdt = arr.dtype.newbyteorder('s') barr = arr.astype(sdt) assert_equal(barr.dtype.byteorder, SWAPPED_ORDER) narr = as_native_array(barr) assert_false(barr is narr) assert_array_equal(barr, narr) assert_equal(narr.dtype.byteorder, NATIVE_ORDER)
def test_tripwire(): # Test tripwire object silly_module_name = TripWire('We do not have silly_module_name') assert_raises(TripWireError, getattr, silly_module_name, 'do_silly_thing') assert_raises(TripWireError, silly_module_name) # Check AttributeError can be checked too try: silly_module_name.__wrapped__ except TripWireError as err: assert_true(isinstance(err, AttributeError)) else: raise RuntimeError("No error raised, but expected")
def test_sph_harm_ind_list(): m_list, n_list = sph_harm_ind_list(8) assert_equal(m_list.shape, n_list.shape) assert_equal(m_list.shape, (45, )) assert_true(np.all(np.abs(m_list) <= n_list)) assert_array_equal(n_list % 2, 0) assert_raises(ValueError, sph_harm_ind_list, 1) # Test for a full basis m_list, n_list = sph_harm_ind_list(8, True) assert_equal(m_list.shape, n_list.shape) # There are (sh_order + 1) * (sh_order + 1) coefficients assert_equal(m_list.shape, (81, )) assert_true(np.all(np.abs(m_list) <= n_list))
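# Why 45 and 81: a symmetric (even-order) SH basis of order n has
# (n + 1) * (n + 2) / 2 coefficients, while the full basis has (n + 1) ** 2.
# For n = 8 that gives the 45 and 81 asserted above.
def _sh_coefficient_count_sketch(sh_order=8):
    n_even = (sh_order + 1) * (sh_order + 2) // 2
    n_full = (sh_order + 1) ** 2
    assert (n_even, n_full) == (45, 81)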
def test_split_flow(): with TemporaryDirectory() as out_dir: split_flow = SplitFlow() data_path, _, _ = get_fnames() volume, affine = load_nifti(data_path) split_flow.run(data_path, out_dir=out_dir) assert_true( os.path.isfile(split_flow.last_generated_outputs['out_split'])) split_flow._force_overwrite = True split_flow.run(data_path, vol_idx=0, out_dir=out_dir) split_path = split_flow.last_generated_outputs['out_split'] assert_true(os.path.isfile(split_path)) split_data, split_affine = load_nifti(split_path) npt.assert_equal(split_data.shape, volume[..., 0].shape) npt.assert_array_almost_equal(split_affine, affine)
def test_cluster_attributes_and_constructor(): cluster = Cluster() assert_equal(type(cluster), Cluster) assert_equal(cluster.id, 0) assert_array_equal(cluster.indices, []) assert_equal(len(cluster), 0) # Duplicate assert_true( cluster == Cluster(cluster.id, cluster.indices, cluster.refdata)) assert_false( cluster != Cluster(cluster.id, cluster.indices, cluster.refdata)) # Invalid comparison assert_raises(TypeError, cluster.__cmp__, cluster)
def test_random_seeds_from_mask(): mask = np.random.randint(0, 1, size=(4, 6, 3)) seeds = random_seeds_from_mask(mask, seeds_count=24, seed_count_per_voxel=True) npt.assert_equal(mask.sum() * 24, len(seeds)) seeds = random_seeds_from_mask(mask, seeds_count=0, seed_count_per_voxel=True) npt.assert_equal(0, len(seeds)) mask[:] = False mask[2, 2, 2] = True seeds = random_seeds_from_mask(mask, seeds_count=8, seed_count_per_voxel=True) npt.assert_equal(mask.sum() * 8, len(seeds)) assert_true(np.all((seeds > 1.5) & (seeds < 2.5))) seeds = random_seeds_from_mask(mask, seeds_count=24, seed_count_per_voxel=False) npt.assert_equal(24, len(seeds)) seeds = random_seeds_from_mask(mask, seeds_count=0, seed_count_per_voxel=False) npt.assert_equal(0, len(seeds)) mask[:] = False mask[2, 2, 2] = True seeds = random_seeds_from_mask(mask, seeds_count=100, seed_count_per_voxel=False) npt.assert_equal(100, len(seeds)) assert_true(np.all((seeds > 1.5) & (seeds < 2.5))) mask = np.zeros((15, 15, 15)) mask[2:14, 2:14, 2:14] = 1 seeds_npv_2 = random_seeds_from_mask(mask, seeds_count=2, seed_count_per_voxel=True, random_seed=0)[:150] seeds_npv_3 = random_seeds_from_mask(mask, seeds_count=3, seed_count_per_voxel=True, random_seed=0)[:150] assert_true(np.all(seeds_npv_2 == seeds_npv_3)) seeds_nt_150 = random_seeds_from_mask(mask, seeds_count=150, seed_count_per_voxel=False, random_seed=0)[:150] seeds_nt_500 = random_seeds_from_mask(mask, seeds_count=500, seed_count_per_voxel=False, random_seed=0)[:150] assert_true(np.all(seeds_nt_150 == seeds_nt_500))
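# Minimal usage sketch of the two counting modes exercised above (same
# keyword signature as the test calls; the mask is hypothetical): with
# seed_count_per_voxel=True the total is seeds_count per nonzero voxel,
# otherwise seeds_count is the overall total, and random_seed makes the draw
# reproducible.
def _random_seeds_sketch():
    mask = np.zeros((5, 5, 5), dtype=bool)
    mask[2, 2, 2] = True
    mask[3, 3, 3] = True
    per_voxel = random_seeds_from_mask(mask, seeds_count=4,
                                       seed_count_per_voxel=True,
                                       random_seed=0)
    total = random_seeds_from_mask(mask, seeds_count=4,
                                   seed_count_per_voxel=False,
                                   random_seed=0)
    assert len(per_voxel) == int(mask.sum()) * 4
    assert len(total) == 4
    repeat = random_seeds_from_mask(mask, seeds_count=4,
                                    seed_count_per_voxel=True,
                                    random_seed=0)
    np.testing.assert_array_equal(per_voxel, repeat)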
def test_feature_resample(): from dipy.tracking.streamline import set_number_of_points # Test subclassing Feature class ResampleFeature(dipysfeature.Feature): def __init__(self, nb_points): super(ResampleFeature, self).__init__(is_order_invariant=False) self.nb_points = nb_points if nb_points <= 0: msg = ("ResampleFeature: `nb_points` must be strictly" " positive: {0}").format(nb_points) raise ValueError(msg) def infer_shape(self, streamline): return (self.nb_points, streamline.shape[1]) def extract(self, streamline): return set_number_of_points(streamline, self.nb_points) assert_raises(ValueError, dipysfeature.ResampleFeature, nb_points=0) assert_raises(ValueError, ResampleFeature, nb_points=0) max_points = max(map(len, [s1, s2, s3, s4])) for nb_points in [2, 5, 2 * max_points]: for feature in [ dipysfeature.ResampleFeature(nb_points), ResampleFeature(nb_points) ]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (nb_points, s.shape[1])) # Test method extract features = feature.extract(s) assert_equal(features.shape, (nb_points, s.shape[1])) assert_array_almost_equal(features, set_number_of_points(s, nb_points)) # This feature type is not order invariant assert_false(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) assert_array_equal(features_flip, set_number_of_points(s[::-1], nb_points)) assert_true(np.any(np.not_equal(features, features_flip)))
def test_cluster_centroid_attributes_and_constructor(): centroid = np.zeros(features_shape) cluster = ClusterCentroid(centroid) assert_equal(type(cluster), ClusterCentroid) assert_equal(cluster.id, 0) assert_array_equal(cluster.indices, []) assert_array_equal(cluster.centroid, np.zeros(features_shape)) assert_equal(len(cluster), 0) # Duplicate assert_true(cluster == ClusterCentroid(centroid)) assert_false(cluster != ClusterCentroid(centroid)) assert_false(cluster == ClusterCentroid(centroid + 1)) # Invalid comparison assert_raises(TypeError, cluster.__cmp__, cluster)
def test_ba(): with TemporaryDirectory() as dirpath: streams, hdr = nib.trackvis.read(get_fnames('fornix')) fornix = [s[0] for s in streams] f = Streamlines(fornix) mb = os.path.join(dirpath, "model_bundles") os.mkdir(mb) save_trk(os.path.join(mb, "temp.trk"), f, affine=np.eye(4)) rb = os.path.join(dirpath, "rec_bundles") os.mkdir(rb) save_trk(os.path.join(rb, "temp.trk"), f, affine=np.eye(4)) ob = os.path.join(dirpath, "org_bundles") os.mkdir(ob) save_trk(os.path.join(ob, "temp.trk"), f, affine=np.eye(4)) dt = os.path.join(dirpath, "dti_measures") os.mkdir(dt) fa = np.random.rand(255, 255, 255) save_nifti(os.path.join(dt, "fa.nii.gz"), fa, affine=np.eye(4)) out_dir = os.path.join(dirpath, "output") os.mkdir(out_dir) bundle_analysis(mb, rb, ob, dt, group="patient", subject="10001", no_disks=100, out_dir=out_dir) assert_true(os.path.exists(os.path.join(out_dir, 'fa.h5')))
def test_nlmeans_flow(): with TemporaryDirectory() as out_dir: data_path, _, _ = get_fnames() volume, affine = load_nifti(data_path) nlmeans_flow = NLMeansFlow() nlmeans_flow.run(data_path, out_dir=out_dir) assert_true(os.path.isfile( nlmeans_flow.last_generated_outputs['out_denoised'])) nlmeans_flow._force_overwrite = True nlmeans_flow.run(data_path, sigma=4, out_dir=out_dir) denoised_path = nlmeans_flow.last_generated_outputs['out_denoised'] assert_true(os.path.isfile(denoised_path)) denoised_data, denoised_affine = load_nifti(denoised_path) npt.assert_equal(denoised_data.shape, volume.shape) npt.assert_array_almost_equal(denoised_affine, affine)
def test_feature_resample(): from dipy.tracking.streamline import set_number_of_points # Test subclassing Feature class ResampleFeature(dipymetric.Feature): def __init__(self, nb_points): super(ResampleFeature, self).__init__(is_order_invariant=False) self.nb_points = nb_points if nb_points <= 0: msg = ("ResampleFeature: `nb_points` must be strictly" " positive: {0}").format(nb_points) raise ValueError(msg) def infer_shape(self, streamline): return (self.nb_points, streamline.shape[1]) def extract(self, streamline): return set_number_of_points(streamline, self.nb_points) assert_raises(ValueError, dipymetric.ResampleFeature, nb_points=0) assert_raises(ValueError, ResampleFeature, nb_points=0) max_points = max(map(len, [s1, s2, s3, s4])) for nb_points in [2, 5, 2*max_points]: for feature in [dipymetric.ResampleFeature(nb_points), ResampleFeature(nb_points)]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (nb_points, s.shape[1])) # Test method extract features = feature.extract(s) assert_equal(features.shape, (nb_points, s.shape[1])) assert_array_almost_equal(features, set_number_of_points(s, nb_points)) # This feature type is not order invariant assert_false(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) assert_array_equal(features_flip, set_number_of_points(s[::-1], nb_points)) assert_true(np.any(np.not_equal(features, features_flip)))
def test_seeds_from_mask(): mask = np.random.randint(0, 1, size=(10, 10, 10)) seeds = seeds_from_mask(mask, density=1) npt.assert_equal(mask.sum(), len(seeds)) npt.assert_array_equal(np.argwhere(mask), seeds) mask[:] = False mask[3, 3, 3] = True seeds = seeds_from_mask(mask, density=[3, 4, 5]) npt.assert_equal(len(seeds), 3 * 4 * 5) assert_true(np.all((seeds > 2.5) & (seeds < 3.5))) mask[4, 4, 4] = True seeds = seeds_from_mask(mask, density=[3, 4, 5]) npt.assert_equal(len(seeds), 2 * 3 * 4 * 5) assert_true(np.all((seeds > 2.5) & (seeds < 4.5))) in_333 = ((seeds > 2.5) & (seeds < 3.5)).all(1) npt.assert_equal(in_333.sum(), 3 * 4 * 5) in_444 = ((seeds > 3.5) & (seeds < 4.5)).all(1) npt.assert_equal(in_444.sum(), 3 * 4 * 5)
def test_seeds_from_mask(): mask = np.random.randint(0, 1, size=(10, 10, 10)) seeds = seeds_from_mask(mask, np.eye(4), density=1) npt.assert_equal(mask.sum(), len(seeds)) npt.assert_array_equal(np.argwhere(mask), seeds) mask[:] = False mask[3, 3, 3] = True seeds = seeds_from_mask(mask, np.eye(4), density=[3, 4, 5]) npt.assert_equal(len(seeds), 3 * 4 * 5) assert_true(np.all((seeds > 2.5) & (seeds < 3.5))) mask[4, 4, 4] = True seeds = seeds_from_mask(mask, np.eye(4), density=[3, 4, 5]) npt.assert_equal(len(seeds), 2 * 3 * 4 * 5) assert_true(np.all((seeds > 2.5) & (seeds < 4.5))) in_333 = ((seeds > 2.5) & (seeds < 3.5)).all(1) npt.assert_equal(in_333.sum(), 3 * 4 * 5) in_444 = ((seeds > 3.5) & (seeds < 4.5)).all(1) npt.assert_equal(in_444.sum(), 3 * 4 * 5)
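# Sketch of the density semantics asserted above (same call style as this
# test, with an identity affine): density=[dx, dy, dz] places dx*dy*dz evenly
# spaced seeds inside every nonzero voxel, so the total number of seeds is
# mask.sum() * dx * dy * dz. The mask is hypothetical.
def _seeds_from_mask_density_sketch():
    mask = np.zeros((5, 5, 5), dtype=bool)
    mask[1, 1, 1] = True
    mask[3, 3, 3] = True
    seeds = seeds_from_mask(mask, np.eye(4), density=[2, 2, 2])
    assert len(seeds) == int(mask.sum()) * 2 * 2 * 2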
def test_skipper(): def f(): pass docstring = \ """ Header >>> something # skip if not HAVE_AMODULE >>> something + else >>> a = 1 # skip if not HAVE_BMODULE >>> something2 # skip if HAVE_AMODULE """ f.__doc__ = docstring global HAVE_AMODULE, HAVE_BMODULE HAVE_AMODULE = False HAVE_BMODULE = True f2 = doctest_skip_parser(f) assert_true(f is f2) assert_equal( f2.__doc__, """ Header >>> something # doctest: +SKIP >>> something + else >>> a = 1 >>> something2 """) HAVE_AMODULE = True HAVE_BMODULE = False f.__doc__ = docstring f2 = doctest_skip_parser(f) assert_true(f is f2) assert_equal( f2.__doc__, """ Header >>> something >>> something + else >>> a = 1 # doctest: +SKIP >>> something2 # doctest: +SKIP """) del HAVE_AMODULE f.__doc__ = docstring assert_raises(NameError, doctest_skip_parser, f)
def test_bundle_analysis_population_flow(): with TemporaryDirectory() as dirpath: streams, hdr = nib.trackvis.read(get_fnames('fornix')) fornix = [s[0] for s in streams] f = Streamlines(fornix) mb = os.path.join(dirpath, "model_bundles") sub = os.path.join(dirpath, "subjects") os.mkdir(mb) save_trk(os.path.join(mb, "temp.trk"), f, affine=np.eye(4)) os.mkdir(sub) os.mkdir(os.path.join(sub, "patient")) os.mkdir(os.path.join(sub, "control")) p = os.path.join(sub, "patient", "10001") os.mkdir(p) c = os.path.join(sub, "control", "20002") os.mkdir(c) for pre in [p, c]: os.mkdir(os.path.join(pre, "rec_bundles")) save_trk(os.path.join(pre, "rec_bundles", "temp.trk"), f, affine=np.eye(4)) os.mkdir(os.path.join(pre, "org_bundles")) save_trk(os.path.join(pre, "org_bundles", "temp.trk"), f, affine=np.eye(4)) os.mkdir(os.path.join(pre, "measures")) fa = np.random.rand(255, 255, 255) save_nifti(os.path.join(pre, "measures", "fa.nii.gz"), fa, affine=np.eye(4)) out_dir = os.path.join(dirpath, "output") os.mkdir(out_dir) ba_flow = BundleAnalysisPopulationFlow() ba_flow.run(mb, sub, out_dir=out_dir) assert_true(os.path.exists(os.path.join(out_dir, 'fa.h5'))) dft = pd.read_hdf(os.path.join(out_dir, 'fa.h5')) assert_true(dft.bundle.unique() == "temp") assert_true(set(dft.subject.unique()) == set(['10001', '20002']))
def test_connectivity_matrix():
    label_volume = np.array([[[3, 0, 0],
                              [0, 0, 0],
                              [0, 0, 4]]])
    streamlines = [np.array([[0, 0, 0], [0, 0, 0], [0, 2, 2]], 'float'),
                   np.array([[0, 0, 0], [0, 1, 1], [0, 2, 2]], 'float'),
                   np.array([[0, 2, 2], [0, 1, 1], [0, 0, 0]], 'float')]
    expected = np.zeros((5, 5), 'int')
    expected[3, 4] = 2
    expected[4, 3] = 1
    # Check basic case
    matrix = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                 symmetric=False)
    npt.assert_array_equal(matrix, expected)
    # Test mapping
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          symmetric=False, return_mapping=True)
    npt.assert_array_equal(matrix, expected)
    npt.assert_equal(mapping[3, 4], [0, 1])
    npt.assert_equal(mapping[4, 3], [2])
    npt.assert_equal(mapping.get((0, 0)), None)
    # Test mapping and symmetric
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          symmetric=True, return_mapping=True)
    npt.assert_equal(mapping[3, 4], [0, 1, 2])
    # When symmetric only (3, 4) is a key, not (4, 3)
    npt.assert_equal(mapping.get((4, 3)), None)
    # The expected output matrix is the symmetric version of expected
    expected = expected + expected.T
    npt.assert_array_equal(matrix, expected)
    # Test mapping_as_streamlines, mapping dict has lists of streamlines
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          symmetric=False, return_mapping=True,
                                          mapping_as_streamlines=True)
    assert_true(mapping[3, 4][0] is streamlines[0])
    assert_true(mapping[3, 4][1] is streamlines[1])
    assert_true(mapping[4, 3][0] is streamlines[2])
    # Test passing affine to connectivity_matrix
    affine = np.diag([-1, -1, -1, 1.])
    streamlines = [-i for i in streamlines]
    matrix = connectivity_matrix(streamlines, label_volume, affine=affine)
    # In the symmetrical case, the matrix should be, well, symmetric:
    npt.assert_equal(matrix[3, 4], matrix[4, 3])
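# Follow-up sketch of the invariant used implicitly above: with
# return_mapping=True every matrix entry equals the number of streamline
# indices stored for that label pair (e.g. matrix[3, 4] == 2 while
# mapping[3, 4] == [0, 1]). Same toy label volume, a single toy streamline.
def _connectivity_mapping_sketch():
    label_volume = np.array([[[3, 0, 0],
                              [0, 0, 0],
                              [0, 0, 4]]])
    streamlines = [np.array([[0, 0, 0], [0, 1, 1], [0, 2, 2]], 'float')]
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          symmetric=False, return_mapping=True)
    for (i, j), indices in mapping.items():
        assert matrix[i, j] == len(indices)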
def _target(target_f, streamlines, voxel_both_true, voxel_one_true, test_bad_points): affine = np.eye(4) mask = np.zeros((4, 4, 4), dtype=bool) # Both pass though mask[voxel_both_true] = True new = list(target_f(streamlines, mask, affine=affine)) npt.assert_equal(len(new), 2) new = list(target_f(streamlines, mask, affine=affine, include=False)) npt.assert_equal(len(new), 0) # only first mask[:] = False mask[voxel_one_true] = True new = list(target_f(streamlines, mask, affine=affine)) npt.assert_equal(len(new), 1) assert_true(new[0] is streamlines[0]) new = list(target_f(streamlines, mask, affine=affine, include=False)) npt.assert_equal(len(new), 1) assert_true(new[0] is streamlines[1]) # Test that bad points raise a value error if test_bad_points: bad_sl = streamlines + [np.array([[10.0, 10.0, 10.0]])] new = target_f(bad_sl, mask, affine=affine) npt.assert_raises(ValueError, list, new) bad_sl = streamlines + [-np.array([[10.0, 10.0, 10.0]])] new = target_f(bad_sl, mask, affine=affine) npt.assert_raises(ValueError, list, new) # Test smaller voxels affine = np.array([[.3, 0, 0, 0], [0, .2, 0, 0], [0, 0, .4, 0], [0, 0, 0, 1]]) streamlines = list(move_streamlines(streamlines, affine)) new = list(target_f(streamlines, mask, affine=affine)) npt.assert_equal(len(new), 1) assert_true(new[0] is streamlines[0]) new = list(target_f(streamlines, mask, affine=affine, include=False)) npt.assert_equal(len(new), 1) assert_true(new[0] is streamlines[1]) # Test that changing mask or affine does not break target/target_line_based include = target_f(streamlines, mask, affine=affine) exclude = target_f(streamlines, mask, affine=affine, include=False) affine[:] = np.eye(4) mask[:] = False include = list(include) exclude = list(exclude) npt.assert_equal(len(include), 1) assert_true(include[0] is streamlines[0]) npt.assert_equal(len(exclude), 1) assert_true(exclude[0] is streamlines[1])
def test_stats(): with TemporaryDirectory() as out_dir: data_path, bval_path, bvec_path = get_fnames('small_101D') vol_img = nib.load(data_path) volume = vol_img.get_data() mask = np.ones_like(volume[:, :, :, 0]) mask_img = nib.Nifti1Image(mask.astype(np.uint8), vol_img.affine) mask_path = join(out_dir, 'tmp_mask.nii.gz') nib.save(mask_img, mask_path) snr_flow = SNRinCCFlow(force=True) args = [data_path, bval_path, bvec_path, mask_path] snr_flow.run(*args, out_dir=out_dir) assert_true(os.path.exists(os.path.join(out_dir, 'product.json'))) assert_true(os.stat(os.path.join( out_dir, 'product.json')).st_size != 0) assert_true(os.path.exists(os.path.join(out_dir, 'cc.nii.gz'))) assert_true(os.stat(os.path.join(out_dir, 'cc.nii.gz')).st_size != 0) assert_true(os.path.exists(os.path.join(out_dir, 'mask_noise.nii.gz'))) assert_true(os.stat(os.path.join( out_dir, 'mask_noise.nii.gz')).st_size != 0) snr_flow._force_overwrite = True snr_flow.run(*args, out_dir=out_dir) assert_true(os.path.exists(os.path.join(out_dir, 'product.json'))) assert_true(os.stat(os.path.join( out_dir, 'product.json')).st_size != 0) assert_true(os.path.exists(os.path.join(out_dir, 'cc.nii.gz'))) assert_true(os.stat(os.path.join(out_dir, 'cc.nii.gz')).st_size != 0) assert_true(os.path.exists(os.path.join(out_dir, 'mask_noise.nii.gz'))) assert_true(os.stat(os.path.join( out_dir, 'mask_noise.nii.gz')).st_size != 0) snr_flow._force_overwrite = True snr_flow.run(*args, bbox_threshold=(0.5, 1, 0, 0.15, 0, 0.2), out_dir=out_dir) assert_true(os.path.exists(os.path.join(out_dir, 'product.json'))) assert_true(os.stat(os.path.join( out_dir, 'product.json')).st_size != 0) assert_true(os.path.exists(os.path.join(out_dir, 'cc.nii.gz'))) assert_true(os.stat(os.path.join(out_dir, 'cc.nii.gz')).st_size != 0) assert_true(os.path.exists(os.path.join(out_dir, 'mask_noise.nii.gz'))) assert_true(os.stat(os.path.join( out_dir, 'mask_noise.nii.gz')).st_size != 0)
def test_is_tripwire(): assert_false(is_tripwire(object())) assert_true(is_tripwire(TripWire('some message')))
def test_metric_minimum_average_direct_flip(): feature = dipymetric.IdentityFeature() class MinimumAverageDirectFlipMetric(dipymetric.Metric): def __init__(self, feature): super(MinimumAverageDirectFlipMetric, self).__init__( feature=feature) @property def is_order_invariant(self): return True # Ordering is handled in the distance computation def are_compatible(self, shape1, shape2): return shape1[0] == shape2[0] def dist(self, v1, v2): def average_euclidean(x, y): return np.mean(norm(x-y, axis=1)) dist_direct = average_euclidean(v1, v2) dist_flipped = average_euclidean(v1, v2[::-1]) return min(dist_direct, dist_flipped) for metric in [MinimumAverageDirectFlipMetric(feature), dipymetric.MinimumAverageDirectFlipMetric(feature)]: # Test special cases of the MDF distance. assert_equal(metric.dist(s, s), 0.) assert_equal(metric.dist(s, s[::-1]), 0.) # Translation offset = np.array([0.8, 1.3, 5], dtype=dtype) assert_almost_equal(metric.dist(s, s+offset), norm(offset), 5) # Scaling M_scaling = np.diag([1.2, 2.8, 3]).astype(dtype) s_mean = np.mean(s, axis=0) s_zero_mean = s - s_mean s_scaled = np.dot(M_scaling, s_zero_mean.T).T + s_mean d = np.mean(norm((np.diag(M_scaling)-1)*s_zero_mean, axis=1)) assert_almost_equal(metric.dist(s, s_scaled), d, 5) # Rotation from dipy.core.geometry import rodrigues_axis_rotation rot_axis = np.array([1, 2, 3], dtype=dtype) M_rotation = rodrigues_axis_rotation(rot_axis, 60.).astype(dtype) s_mean = np.mean(s, axis=0) s_zero_mean = s - s_mean s_rotated = np.dot(M_rotation, s_zero_mean.T).T + s_mean opposite = norm(np.cross(rot_axis, s_zero_mean), axis=1) / norm(rot_axis) distances = np.sqrt(2*opposite**2 * (1 - np.cos(60.*np.pi/180.))).astype(dtype) d = np.mean(distances) assert_almost_equal(metric.dist(s, s_rotated), d, 5) # All possible pairs for s1, s2 in itertools.product(*[streamlines]*2): # Extract features since metric doesn't work # directly on streamlines f1 = metric.feature.extract(s1) f2 = metric.feature.extract(s2) # Test method are_compatible same_nb_points = f1.shape[0] == f2.shape[0] assert_equal(metric.are_compatible(f1.shape, f2.shape), same_nb_points) # Test method dist if features are compatible if metric.are_compatible(f1.shape, f2.shape): distance = metric.dist(f1, f2) if np.all(f1 == f2): assert_equal(distance, 0.) assert_almost_equal(distance, dipymetric.dist(metric, s1, s2)) assert_almost_equal(distance, dipymetric.mdf(s1, s2)) assert_greater_equal(distance, 0.) # This metric type is order invariant assert_true(metric.is_order_invariant) # All possible pairs for s1, s2 in itertools.product(*[streamlines]*2): f1 = metric.feature.extract(s1) f2 = metric.feature.extract(s2) if not metric.are_compatible(f1.shape, f2.shape): continue f1_flip = metric.feature.extract(s1[::-1]) f2_flip = metric.feature.extract(s2[::-1]) distance = metric.dist(f1, f2) assert_almost_equal(metric.dist(f1_flip, f2_flip), distance) if not np.all(f1_flip == f2_flip): assert_true(np.allclose(metric.dist(f1, f2_flip), distance)) assert_true(np.allclose(metric.dist(f1_flip, f2), distance))
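# Worked numeric sketch of the MDF distance exercised above: for two
# equal-length streamlines it is the smaller of the mean point-wise Euclidean
# distance computed directly and the one computed against the flipped
# ordering. The two parallel toy streamlines are hypothetical; dipymetric is
# the module alias already used in this file.
def _mdf_sketch():
    a = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0]], dtype='f4')
    b = np.array([[0, 1, 0], [1, 1, 0], [2, 1, 0]], dtype='f4')
    direct = np.mean(np.linalg.norm(a - b, axis=1))           # 1.0
    flipped = np.mean(np.linalg.norm(a - b[::-1], axis=1))    # ~1.82
    assert direct < flipped
    np.testing.assert_almost_equal(min(direct, flipped), 1.0, decimal=6)
    np.testing.assert_almost_equal(dipymetric.mdf(a, b), 1.0, decimal=5)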
def test_gqiodf(): # read bvals,gradients and data bvals = np.load(opj(os.path.dirname(__file__), 'data', 'small_64D.bvals.npy')) gradients = np.load(opj(os.path.dirname(__file__), 'data', 'small_64D.gradients.npy')) img = ni.load(os.path.join(os.path.dirname(__file__), 'data', 'small_64D.nii')) data = img.get_data() # print(bvals.shape) # print(gradients.shape) # print(data.shape) # t1=time.clock() gq.GeneralizedQSampling(data, bvals, gradients) ten = dt.Tensor(data, bvals, gradients, thresh=50) ten.fa() x, y, z, a, b = ten.evecs.shape evecs = ten.evecs xyz = x * y * z evecs = evecs.reshape(xyz, 3, 3) # vs = np.sign(evecs[:,2,:]) # print vs.shape # print np.hstack((vs,vs,vs)).reshape(1000,3,3).shape # evecs = np.hstack((vs,vs,vs)).reshape(1000,3,3) # print evecs.shape evals = ten.evals evals = evals.reshape(xyz, 3) # print evals.shape # print('GQS in %d' %(t2-t1)) eds = np.load(opj(os.path.dirname(__file__), '..', 'matrices', 'evenly_distributed_sphere_362.npz')) odf_vertices = eds['vertices'] odf_faces = eds['faces'] # Yeh et.al, IEEE TMI, 2010 # calculate the odf using GQI scaling = np.sqrt(bvals * 0.01506) # 0.01506 = 6*D where D is the free # water diffusion coefficient # l_values sqrt(6 D tau) D free water # diffusion coefficiet and tau included in the b-value tmp = np.tile(scaling, (3, 1)) b_vector = gradients.T*tmp Lambda = 1.2 # smoothing parameter - diffusion sampling length q2odf_params = np.sinc(np.dot(b_vector.T, odf_vertices.T) * Lambda/np.pi) # implements equation no. 9 from Yeh et.al. S = data.copy() x, y, z, g = S.shape S = S.reshape(x * y * z, g) QA = np.zeros((x * y * z, 5)) IN = np.zeros((x * y * z, 5)) fwd = 0 # Calculate Quantitative Anisotropy and find the peaks and the indices # for every voxel summary = {} summary['vertices'] = odf_vertices v = odf_vertices.shape[0] summary['faces'] = odf_faces f = odf_faces.shape[0] """ If e = number_of_edges the Euler formula says f-e+v = 2 for a mesh on a sphere Here, assuming we have a healthy triangulation every face is a triangle, all 3 of whose edges should belong to exactly two faces = so 2*e = 3*f to avoid division we test whether 2*f - 3*f + 2*v == 4 or equivalently 2*v - f == 4 """ assert_equal(2 * v - f, 4, 'Direct Euler test fails') assert_true(meshes.euler_characteristic_check(odf_vertices, odf_faces, chi=2), 'euler_characteristic_check fails') coarse = meshes.coarseness(odf_faces) print('coarseness: ', coarse) for i, s in enumerate(S): # print 'Volume %d' % i istr = str(i) summary[istr] = {} odf = Q2odf(s, q2odf_params) peaks, inds = rp.peak_finding(odf, odf_faces) fwd = max(np.max(odf), fwd) peaks = peaks - np.min(odf) l = min(len(peaks), 5) QA[i][:l] = peaks[:l] IN[i][:l] = inds[:l] summary[istr]['odf'] = odf summary[istr]['peaks'] = peaks summary[istr]['inds'] = inds summary[istr]['evecs'] = evecs[i, :, :] summary[istr]['evals'] = evals[i, :] QA /= fwd # QA=QA.reshape(x,y,z,5) # IN=IN.reshape(x,y,z,5) # print('Old %d secs' %(time.clock() - t2)) # assert_equal((gqs.QA-QA).max(),0.,'Frank QA different than our QA') # assert_equal((gqs.QA.shape),QA.shape, 'Frank QA shape is different') # assert_equal((gqs.QA-QA).max(), 0.) 
# import dipy.core.track_propagation as tp # tp.FACT_Delta(QA,IN) # return tp.FACT_Delta(QA,IN,seeds_no=10000).tracks peaks_1 = [i for i in range(1000) if len(summary[str(i)]['inds']) == 1] peaks_2 = [i for i in range(1000) if len(summary[str(i)]['inds']) == 2] peaks_3 = [i for i in range(1000) if len(summary[str(i)]['inds']) == 3] # correct numbers of voxels with respectively 1,2,3 ODF/QA peaks assert_array_equal((len(peaks_1), len(peaks_2), len(peaks_3)), (790, 196, 14), 'error in numbers of QA/ODF peaks') # correct indices of odf directions for voxels 0,10,44 # with respectively 1,2,3 ODF/QA peaks assert_array_equal(summary['0']['inds'], [116], 'wrong peak indices for voxel 0') assert_array_equal(summary['10']['inds'], [105, 78], 'wrong peak indices for voxel 10') assert_array_equal(summary['44']['inds'], [95, 84, 108], 'wrong peak indices for voxel 44') assert_equal(np.argmax(summary['0']['odf']), 116) assert_equal(np.argmax(summary['10']['odf']), 105) assert_equal(np.argmax(summary['44']['odf']), 95) # pole_1 = summary['vertices'][116] # print 'pole_1', pole_1 # pole_2 = summary['vertices'][105] # print 'pole_2', pole_2 # pole_3 = summary['vertices'][95] # print 'pole_3', pole_3 vertices = summary['vertices'] width = 0.02 # 0.3 #0.05 """ print('pole_1 equator contains:', len([i for i,v in enumerate(vertices) if np.abs(np.dot(v,pole_1)) < width]) print('pole_2 equator contains:', len([i for i,v in enumerate(vertices) if np.abs(np.dot(v,pole_2)) < width]) print('pole_3 equator contains:', len([i for i,v in enumerate(vertices) if np.abs(np.dot(v,pole_3)) < width]) """ # print 'pole_1 equator contains:', len(meshes.equatorial_vertices(vertices,pole_1,width)) # print 'pole_2 equator contains:', len(meshes.equatorial_vertices(vertices,pole_2,width)) # print 'pole_3 equator contains:', len(meshes'equatorial_vertices(vertices,pole_3,width)) # print triple_odf_maxima(vertices,summary['0']['odf'],width) # print triple_odf_maxima(vertices,summary['10']['odf'],width) # print triple_odf_maxima(vertices,summary['44']['odf'],width) # print summary['0']['evals'] """ pole=np.array([0,0,1]) from dipy.viz import fos r=fos.ren() fos.add(r,fos.point(pole,fos.green)) for i,ev in enumerate(vertices): if np.abs(np.dot(ev,pole))<width: fos.add(r,fos.point(ev,fos.red)) fos.show(r) """ triple = triple_odf_maxima(vertices, summary['10']['odf'], width) indmax1, odfmax1 = triple[0] indmax2, odfmax2 = triple[1] indmax3, odfmax3 = triple[2] """ from dipy.viz import fos r=fos.ren() for v in vertices: fos.add(r,fos.point(v,fos.cyan)) fos.add(r,fos.sphere(upper_hemi_map(vertices[indmax1]),radius=0.1,color=fos.red)) #fos.add(r,fos.line(np.array([0,0,0]),vertices[indmax1])) fos.add(r,fos.sphere(upper_hemi_map(vertices[indmax2]),radius=0.05,color=fos.green)) fos.add(r,fos.sphere(upper_hemi_map(vertices[indmax3]),radius=0.025,color=fos.blue)) fos.add(r,fos.sphere(upper_hemi_map(summary['0']['evecs'][:,0]),radius=0.1,color=fos.red,opacity=0.7)) fos.add(r,fos.sphere(upper_hemi_map(summary['0']['evecs'][:,1]),radius=0.05,color=fos.green,opacity=0.7)) fos.add(r,fos.sphere(upper_hemi_map(summary['0']['evecs'][:,2]),radius=0.025,color=fos.blue,opacity=0.7)) fos.add(r,fos.sphere([0,0,0],radius=0.01,color=fos.white)) fos.show(r) """ mat = np.vstack([vertices[indmax1], vertices[indmax2], vertices[indmax3]]) print(np.dot(mat, np.transpose(mat))) # this is to assess how othogonal the triple is/are print(np.dot(summary['0']['evecs'], np.transpose(mat)))
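# Hedged sketch of the GQI reconstruction used by test_gqiodf above
# (Yeh et al., IEEE TMI 2010): the ODF is the measured signal projected onto
# a sinc kernel built from the scaled q-space sampling vectors and the unit
# directions of the reconstruction sphere. Q2odf above is assumed to perform
# this same projection; the arguments here are placeholders for real data.
def _gqi_odf_sketch(signal, bvals, gradients, odf_vertices,
                    sampling_length=1.2):
    # 0.01506 = 6 * D for the free-water diffusivity assumed in the test.
    scaling = np.sqrt(bvals * 0.01506)
    b_vector = gradients * scaling[:, None]          # (n_gradients, 3)
    kernel = np.sinc(np.dot(b_vector, odf_vertices.T) *
                     sampling_length / np.pi)        # (n_gradients, n_dirs)
    return np.dot(signal, kernel)                    # ODF sampled on sphere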