Example #1
def test_slr_flow():
    with TemporaryDirectory() as out_dir:
        data_path = get_fnames('fornix')

        streams, hdr = nib.trackvis.read(data_path)
        fornix = [s[0] for s in streams]

        f = Streamlines(fornix)
        f1 = f.copy()

        f1_path = pjoin(out_dir, "f1.trk")
        save_trk(f1_path, Streamlines(f1), affine=np.eye(4))

        f2 = f1.copy()
        f2._data += np.array([50, 0, 0])

        f2_path = pjoin(out_dir, "f2.trk")
        save_trk(f2_path, Streamlines(f2), affine=np.eye(4))

        slr_flow = SlrWithQbxFlow(force=True)
        slr_flow.run(f1_path, f2_path)

        out_path = slr_flow.last_generated_outputs['out_moved']

        npt.assert_equal(os.path.isfile(out_path), True)
Example #2
def test_rb_no_neighb():
    # if no neighbors are found, there should be no recognition

    b = Streamlines(fornix)
    b1 = b.copy()

    b2 = b1[:20].copy()
    b2._data += np.array([100, 0, 0])

    b3 = b1[:20].copy()
    b3._data += np.array([300, 0, 0])

    b.extend(b3)

    rb = RecoBundles(b, greater_than=0, clust_thr=10)

    rec_trans, rec_labels = rb.recognize(model_bundle=b2,
                                         model_clust_thr=5.,
                                         reduction_thr=10)

    if len(rec_trans) > 0:
        refine_trans, refine_labels = rb.refine(model_bundle=b2,
                                                pruned_streamlines=rec_trans,
                                                model_clust_thr=5.,
                                                reduction_thr=10)

        assert_equal(len(refine_labels), 0)
        assert_equal(len(refine_trans), 0)

    else:
        assert_equal(len(rec_labels), 0)
        assert_equal(len(rec_trans), 0)
Example #3
def remove_clusters_by_size(clusters, min_size=0):
    ob = filter(lambda c: len(c) >= min_size, clusters)

    centroids = Streamlines()
    for cluster in ob:
        centroids.append(cluster.centroid)

    return centroids
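
A quick usage sketch for the helper above. `QuickBundles` and its `cluster`
method are real dipy APIs, but the threshold and `min_size` values here are
illustrative assumptions:

from dipy.segment.clustering import QuickBundles

qb = QuickBundles(threshold=10.)    # cluster streamlines at a 10 mm threshold
clusters = qb.cluster(streamlines)  # ClusterMap; each cluster has a .centroid
# keep only the centroids of clusters with at least 5 streamlines
centroids = remove_clusters_by_size(clusters, min_size=5)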
Example #4
def test_whole_brain_slr():
    streams, hdr = nib.trackvis.read(get_fnames('fornix'))
    fornix = [s[0] for s in streams]

    f = Streamlines(fornix)
    f1 = f.copy()
    f2 = f.copy()

    # check translation
    f2._data += np.array([50, 0, 0])

    moved, transform, qb_centroids1, qb_centroids2 = whole_brain_slr(
            f1, f2, x0='affine', verbose=True, rm_small_clusters=2,
            greater_than=0, less_than=np.inf,
            qbx_thr=[5, 2, 1], progressive=False)

    # we can check the quality of registration by comparing the matrices of
    # MAM streamline distances before and after SLR
    D12 = bundles_distances_mam(f1, f2)
    D1M = bundles_distances_mam(f1, moved)

    d12_minsum = np.sum(np.min(D12, axis=0))
    d1m_minsum = np.sum(np.min(D1M, axis=0))

    print("distances= ", d12_minsum, " ", d1m_minsum)

    assert_equal(d1m_minsum < d12_minsum, True)

    assert_array_almost_equal(transform[:3, 3], [-50, -0, -0], 2)

    # check rotation

    mat = compose_matrix44([0, 0, 0, 15, 0, 0])

    f3 = f.copy()
    f3 = transform_streamlines(f3, mat)

    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qbx(
            f1, f3, verbose=False, rm_small_clusters=1, greater_than=20,
            less_than=np.inf, qbx_thr=[2],
            progressive=True)

    # we can also check the quality by looking at the decomposed transform

    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)

    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qbx(
            f1, f3, verbose=False, rm_small_clusters=1, select_random=400,
            greater_than=20, less_than=np.inf, qbx_thr=[2],
            progressive=True)

    # we can also check the quality by looking at the decomposed transform

    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)
Example #5
def test_horizon():

    s1 = 10 * np.array([[0, 0, 0],
                        [1, 0, 0],
                        [2, 0, 0],
                        [3, 0, 0],
                        [4, 0, 0]], dtype='f8')

    s2 = 10 * np.array([[0, 0, 0],
                        [0, 1, 0],
                        [0, 2, 0],
                        [0, 3, 0],
                        [0, 4, 0]], dtype='f8')

    s3 = 10 * np.array([[0, 0, 0],
                        [1, 0.2, 0],
                        [2, 0.2, 0],
                        [3, 0.2, 0],
                        [4, 0.2, 0]], dtype='f8')

    print(s1.shape)
    print(s2.shape)
    print(s3.shape)

    streamlines = Streamlines()
    streamlines.append(s1)
    streamlines.append(s2)
    streamlines.append(s3)

    tractograms = [streamlines]
    images = None

    horizon(tractograms, images=images, cluster=True, cluster_thr=5,
            random_colors=False, length_lt=np.inf, length_gt=0,
            clusters_lt=np.inf, clusters_gt=0,
            world_coords=False, interactive=False)

    affine = np.diag([2., 1, 1, 1]).astype('f8')

    data = 255 * np.random.rand(150, 150, 150)

    images = [(data, affine)]

    horizon(tractograms, images=images, cluster=True, cluster_thr=5,
            random_colors=False, length_lt=np.inf, length_gt=0,
            clusters_lt=np.inf, clusters_gt=0,
            world_coords=True, interactive=False)

    tractograms = []
    horizon(tractograms, images=images, cluster=True, cluster_thr=5,
            random_colors=False, length_lt=np.inf, length_gt=0,
            clusters_lt=np.inf, clusters_gt=0,
            world_coords=True, interactive=False)
Example #6
def test_streamlines_generator():
    # Test generator
    streamlines_generator = Streamlines(generate_sl(streamlines))
    npt.assert_equal(len(streamlines_generator), len(streamlines))
    # Nothing should change
    streamlines_generator.append(np.array([]))
    npt.assert_equal(len(streamlines_generator), len(streamlines))

    # Test append error
    npt.assert_raises(ValueError, streamlines_generator.append, streamlines)

    # Test empty streamlines
    streamlines_generator = Streamlines(np.array([]))
    npt.assert_equal(len(streamlines_generator), 0)
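
Note: the `generate_sl` helper used above is not shown in this excerpt; a
minimal sketch of what it presumably does (turn a sequence of streamlines
into a generator) is:

def generate_sl(streamlines):
    # yield streamlines one by one, so the caller receives a generator
    for sl in streamlines:
        yield sl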
Example #7
def test_recobundles_flow():
    with TemporaryDirectory() as out_dir:
        data_path = get_fnames('fornix')
        streams, hdr = nib.trackvis.read(data_path)
        fornix = [s[0] for s in streams]

        f = Streamlines(fornix)
        f1 = f.copy()

        f2 = f1[:15].copy()
        f2._data += np.array([40, 0, 0])

        f.extend(f2)

        f2_path = pjoin(out_dir, "f2.trk")
        save_trk(f2_path, f2, affine=np.eye(4))

        f1_path = pjoin(out_dir, "f1.trk")
        save_trk(f1_path, f, affine=np.eye(4))

        rb_flow = RecoBundlesFlow(force=True)
        rb_flow.run(f1_path, f2_path, greater_than=0, clust_thr=10,
                    model_clust_thr=5., reduction_thr=10, out_dir=out_dir)

        labels = rb_flow.last_generated_outputs['out_recognized_labels']
        recog_trk = rb_flow.last_generated_outputs['out_recognized_transf']

        rec_bundle, _ = load_trk(recog_trk)
        npt.assert_equal(len(rec_bundle) == len(f2), True)

        label_flow = LabelsBundlesFlow(force=True)
        label_flow.run(f1_path, labels)

        recog_bundle = label_flow.last_generated_outputs['out_bundle']
        rec_bundle_org, _ = load_trk(recog_bundle)

        BMD = BundleMinDistanceMetric()
        nb_pts = 20
        static = set_number_of_points(f2, nb_pts)
        moving = set_number_of_points(rec_bundle_org, nb_pts)

        BMD.setup(static, moving)
        x0 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1, 0, 0, 0])  # affine
        bmd_value = BMD.distance(x0.tolist())

        npt.assert_equal(bmd_value < 1, True)
Example #8
def test_afq_profile():
    data = np.ones((10, 10, 10))
    bundle = Streamlines()
    bundle.extend(np.array([[[0, 0., 0],
                            [1, 0., 0.],
                            [2, 0., 0.]]]))
    bundle.extend(np.array([[[0, 0., 0.],
                            [1, 0., 0],
                            [2, 0,  0.]]]))

    profile = afq_profile(data, bundle)
    npt.assert_equal(profile, np.ones(100))

    profile = afq_profile(data, bundle, affine=None, n_points=10,
                          weights=None)
    npt.assert_equal(profile, np.ones(10))

    profile = afq_profile(data, bundle, affine=None,
                          weights=gaussian_weights, stat=np.median)

    npt.assert_equal(profile, np.ones(100))

    profile = afq_profile(data, bundle, affine=None, orient_by=bundle[0],
                          weights=gaussian_weights, stat=np.median)

    npt.assert_equal(profile, np.ones(100))

    profile = afq_profile(data, bundle, affine=None, n_points=10,
                          weights=None)
    npt.assert_equal(profile, np.ones(10))

    profile = afq_profile(data, bundle, affine=None, n_points=10,
                          weights=np.ones((2, 10)) * 0.5)
    npt.assert_equal(profile, np.ones(10))

    # Disallow setting weights that don't sum to 1 across fibers/nodes:
    npt.assert_raises(ValueError, afq_profile,
                      data, bundle, affine=None,
                      n_points=10, weights=np.ones((2, 10)) * 0.6)

    # Test using an affine:
    affine = np.eye(4)
    affine[:, 3] = [-1, 100, -20, 1]
    # Transform the streamlines:
    bundle._data = bundle._data + affine[:3, 3]
    profile = afq_profile(data,
                          bundle,
                          affine=affine,
                          n_points=10,
                          weights=None)

    npt.assert_equal(profile, np.ones(10))

    # Test for error-handling:
    empty_bundle = Streamlines([])
    npt.assert_raises(ValueError, afq_profile, data, empty_bundle)
Example #9
from dipy.direction import ProbabilisticDirectionGetter
from dipy.data import small_sphere
from dipy.io.stateful_tractogram import Space, StatefulTractogram
from dipy.io.streamline import save_trk

fod = csd_fit.odf(small_sphere)
pmf = fod.clip(min=0)
prob_dg = ProbabilisticDirectionGetter.from_pmf(pmf,
                                                max_angle=30.,
                                                sphere=small_sphere)
streamline_generator = LocalTracking(prob_dg,
                                     stopping_criterion,
                                     seeds,
                                     affine,
                                     step_size=.5)
streamlines = Streamlines(streamline_generator)
sft = StatefulTractogram(streamlines, hardi_img, Space.RASMM)
save_trk(sft, "tractogram_probabilistic_dg_pmf.trk")

if has_fury:
    scene = window.Scene()
    scene.add(actor.line(streamlines, colormap.line_colors(streamlines)))
    window.record(scene,
                  out_path='tractogram_probabilistic_dg_pmf.png',
                  size=(800, 800))
    if interactive:
        window.show(scene)
"""
.. figure:: tractogram_probabilistic_dg_pmf.png
   :align: center
Example #10
fig.savefig('threshold_fa.png')

"""
.. figure:: threshold_fa.png
 :align: center

 **Thresholded fractional anisotropy map.**
"""

all_streamline_threshold_tc_generator = LocalTracking(dg,
                                                      threshold_classifier,
                                                      seeds,
                                                      affine,
                                                      step_size=.5,
                                                      return_all=True)
streamlines = Streamlines(all_streamline_threshold_tc_generator)
save_trk("all_streamlines_threshold_classifier.trk",
         streamlines,
         affine,
         labels.shape)

if have_fury:
    window.clear(ren)
    ren.add(actor.line(streamlines, cmap.line_colors(streamlines)))
    window.record(ren, out_path='all_streamlines_threshold_classifier.png',
                  size=(600, 600))
    if interactive:
        window.show(ren)

"""
.. figure:: all_streamlines_threshold_classifier.png
Example #11
        def key_press(obj, event):
            key = obj.GetKeySym()
            if self.cluster:

                # hide on/off unselected centroids
                if key == 'h' or key == 'H':
                    if self.hide_centroids:
                        for ca in self.cea:
                            if (self.cea[ca]['length'] >= self.length_min or
                                    self.cea[ca]['size'] >= self.size_min):
                                if self.cea[ca]['selected'] == 0:
                                    ca.VisibilityOff()
                    else:
                        for ca in self.cea:
                            if (self.cea[ca]['length'] >= self.length_min and
                                    self.cea[ca]['size'] >= self.size_min):
                                if self.cea[ca]['selected'] == 0:
                                    ca.VisibilityOn()
                    self.hide_centroids = not self.hide_centroids
                    show_m.render()

                # invert selection
                if key == 'i' or key == 'I':

                    for ca in self.cea:
                        if (self.cea[ca]['length'] >= self.length_min and
                                self.cea[ca]['size'] >= self.size_min):
                            self.cea[ca]['selected'] = \
                                not self.cea[ca]['selected']
                            cas = self.cea[ca]['cluster_actor']
                            self.cla[cas]['selected'] = \
                                self.cea[ca]['selected']
                    show_m.render()

                # save current result
                if key == 's' or key == 'S':
                    saving_streamlines = Streamlines()
                    for bundle in self.cla.keys():
                        if bundle.GetVisibility():
                            t = self.cla[bundle]['tractogram']
                            c = self.cla[bundle]['cluster']
                            indices = self.tractogram_clusters[t][c]
                            saving_streamlines.extend(Streamlines(indices))
                    print('Saving result in tmp.trk')
                    save_trk('tmp.trk', saving_streamlines, np.eye(4))

                if key == 'y' or key == 'Y':
                    active_streamlines = Streamlines()
                    for bundle in self.cla.keys():
                        if bundle.GetVisibility():
                            t = self.cla[bundle]['tractogram']
                            c = self.cla[bundle]['cluster']
                            indices = self.tractogram_clusters[t][c]
                            active_streamlines.extend(Streamlines(indices))

                    # self.tractograms = [active_streamlines]
                    hz2 = horizon([active_streamlines],
                                  self.images, cluster=True, cluster_thr=5,
                                  random_colors=self.random_colors,
                                  length_lt=np.inf,
                                  length_gt=0, clusters_lt=np.inf,
                                  clusters_gt=0,
                                  world_coords=True,
                                  interactive=True)
                    ren2 = hz2.build_scene()
                    hz2.build_show(ren2)

                if key == 'a' or key == 'A':

                    if self.select_all is False:
                        for ca in self.cea:
                            if (self.cea[ca]['length'] >= self.length_min and
                                    self.cea[ca]['size'] >= self.size_min):
                                self.cea[ca]['selected'] = 1
                                cas = self.cea[ca]['cluster_actor']
                                self.cla[cas]['selected'] = \
                                    self.cea[ca]['selected']
                        show_m.render()
                        self.select_all = True
                    else:
                        for ca in self.cea:
                            if (self.cea[ca]['length'] >= self.length_min and
                                    self.cea[ca]['size'] >= self.size_min):
                                self.cea[ca]['selected'] = 0
                                cas = self.cea[ca]['cluster_actor']
                                self.cla[cas]['selected'] = \
                                    self.cea[ca]['selected']
                        show_m.render()
                        self.select_all = False

                if key == 'e' or key == 'E':

                    for c in self.cea:
                        if self.cea[c]['selected']:
                            if not self.cea[c]['expanded']:
                                len_ = self.cea[c]['length']
                                sz_ = self.cea[c]['size']
                                if (len_ >= self.length_min and
                                        sz_ >= self.size_min):
                                    self.cea[c]['cluster_actor']. \
                                        VisibilityOn()
                                    c.VisibilityOff()
                                    self.cea[c]['expanded'] = 1

                    show_m.render()

                if key == 'r' or key == 'R':

                    for c in self.cea:

                        if (self.cea[c]['length'] >= self.length_min and
                                self.cea[c]['size'] >= self.size_min):
                            self.cea[c]['cluster_actor'].VisibilityOff()
                            c.VisibilityOn()
                            self.cea[c]['expanded'] = 0

                show_m.render()
Example #12
def run(context):

    #####################################################
    # Get the paths to input files and other parameters #
    #####################################################
    analysis_data = context.fetch_analysis_data()
    settings = analysis_data['settings']
    postprocessing = settings['postprocessing']

    hcpl_dwi_file_handle = context.get_files('input', modality='HARDI')[0]
    hcpl_dwi_file_path = hcpl_dwi_file_handle.download('/root/')

    hcpl_bvalues_file_handle = context.get_files(
        'input', reg_expression='.*prep.bvalues.hcpl.txt')[0]
    hcpl_bvalues_file_path = hcpl_bvalues_file_handle.download('/root/')
    hcpl_bvecs_file_handle = context.get_files(
        'input', reg_expression='.*prep.gradients.hcpl.txt')[0]
    hcpl_bvecs_file_path = hcpl_bvecs_file_handle.download('/root/')

    dwi_file_handle = context.get_files('input', modality='DSI')[0]
    dwi_file_path = dwi_file_handle.download('/root/')
    bvalues_file_handle = context.get_files(
        'input', reg_expression='.*prep.bvalues.txt')[0]
    bvalues_file_path = bvalues_file_handle.download('/root/')
    bvecs_file_handle = context.get_files(
        'input', reg_expression='.*prep.gradients.txt')[0]
    bvecs_file_path = bvecs_file_handle.download('/root/')

    inject_file_handle = context.get_files(
        'input', reg_expression='.*prep.inject.nii.gz')[0]
    inject_file_path = inject_file_handle.download('/root/')

    VUMC_ROIs_file_handle = context.get_files(
        'input', reg_expression='.*VUMC_ROIs.nii.gz')[0]
    VUMC_ROIs_file_path = VUMC_ROIs_file_handle.download('/root/')

    ###############################
    # _____ _____ _______     __  #
    # |  __ \_   _|  __ \ \   / / #
    # | |  | || | | |__) \ \_/ /  #
    # | |  | || | |  ___/ \   /   #
    # | |__| || |_| |      | |    #
    # |_____/_____|_|      |_|    #
    #                             #
    # dipy.org/documentation      #
    ###############################
    #       IronTract Team        #
    #      TrackyMcTrackface      #
    ###############################

    #################
    # Load the data #
    #################
    dwi_img = nib.load(hcpl_dwi_file_path)
    bvals, bvecs = read_bvals_bvecs(hcpl_bvalues_file_path,
                                    hcpl_bvecs_file_path)
    gtab = gradient_table(bvals, bvecs)

    ############################################
    # Extract the brain mask from the b0 image #
    ############################################
    _, brain_mask = median_otsu(dwi_img.get_data()[:, :, :, 0],
                                median_radius=2,
                                numpass=1)

    ##################################################################
    # Fit the tensor model and compute the fractional anisotropy map #
    ##################################################################
    context.set_progress(message='Processing voxel-wise DTI metrics.')
    tenmodel = TensorModel(gtab)
    tenfit = tenmodel.fit(dwi_img.get_data(), mask=brain_mask)
    FA = fractional_anisotropy(tenfit.evals)
    # fa_file_path = "/root/fa.nii.gz"
    # nib.Nifti1Image(FA,dwi_img.affine).to_filename(fa_file_path)

    ################################################
    # Compute Fiber Orientation Distribution (CSD) #
    ################################################
    context.set_progress(message='Processing voxel-wise FOD estimation.')
    response, _ = auto_response_ssst(gtab,
                                     dwi_img.get_data(),
                                     roi_radii=10,
                                     fa_thr=0.7)
    csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=6)
    csd_fit = csd_model.fit(dwi_img.get_data(), mask=brain_mask)
    # fod_file_path = "/root/fod.nii.gz"
    # nib.Nifti1Image(csd_fit.shm_coeff,dwi_img.affine).to_filename(fod_file_path)

    ###########################################
    # Compute DIPY Probabilistic Tractography #
    ###########################################
    context.set_progress(message='Processing tractography.')
    sphere = get_sphere("repulsion724")
    seed_mask_img = nib.load(inject_file_path)
    affine = seed_mask_img.affine
    seeds = utils.seeds_from_mask(seed_mask_img.get_data(), affine, density=5)

    stopping_criterion = ThresholdStoppingCriterion(FA, 0.2)
    prob_dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit.shm_coeff,
                                                        max_angle=20.,
                                                        sphere=sphere)
    streamline_generator = LocalTracking(prob_dg,
                                         stopping_criterion,
                                         seeds,
                                         affine,
                                         step_size=.2,
                                         max_cross=1)
    streamlines = Streamlines(streamline_generator)
    # sft = StatefulTractogram(streamlines, seed_mask_img, Space.RASMM)
    # streamlines_file_path = "/root/streamlines.trk"
    # save_trk(sft, streamlines_file_path)

    ###########################################################################
    # Compute 3D volumes for the IronTract Challenge. For 'EPFL', we only     #
    # keep streamlines with length > 1mm. We compute the visitation count     #
    # image and apply a small gaussian smoothing. The gaussian smoothing      #
    # is especially useful to increase voxel coverage of deterministic        #
    # algorithms. The log of the smoothed visitation count map is then        #
    # iteratively thresholded, producing 200 volumes/operation points.        #
    # For VUMC, additional streamline filtering is done using anatomical      #
    # priors (keeping only streamlines that intersect with at least one ROI). #
    ###########################################################################
    if postprocessing in ["EPFL", "ALL"]:
        context.set_progress(message='Processing density map (EPFL)')
        volume_folder = "/root/vol_epfl"
        output_epfl_zip_file_path = "/root/TrackyMcTrackface_EPFL_example.zip"
        os.mkdir(volume_folder)
        lengths = length(streamlines)
        streamlines = streamlines[lengths > 1]
        density = utils.density_map(streamlines, affine, seed_mask_img.shape)
        density = scipy.ndimage.gaussian_filter(density.astype("float32"), 0.5)

        log_density = np.log10(density + 1)
        max_density = np.max(log_density)
        for i, t in enumerate(np.arange(0, max_density, max_density / 200)):
            nbr = str(i)
            nbr = nbr.zfill(3)
            mask = log_density >= t
            vol_filename = os.path.join(
                volume_folder, "vol" + nbr + "_t" + str(t) + ".nii.gz")
            nib.Nifti1Image(mask.astype("int32"), affine,
                            seed_mask_img.header).to_filename(vol_filename)
        shutil.make_archive(output_epfl_zip_file_path[:-4], 'zip',
                            volume_folder)

    if postprocessing in ["VUMC", "ALL"]:
        context.set_progress(message='Processing density map (VUMC)')
        ROIs_img = nib.load(VUMC_ROIs_file_path)
        volume_folder = "/root/vol_vumc"
        output_vumc_zip_file_path = "/root/TrackyMcTrackface_VUMC_example.zip"
        os.mkdir(volume_folder)
        lengths = length(streamlines)
        streamlines = streamlines[lengths > 1]

        rois = ROIs_img.get_fdata().astype(int)
        _, grouping = utils.connectivity_matrix(streamlines,
                                                affine,
                                                rois,
                                                inclusive=True,
                                                return_mapping=True,
                                                mapping_as_streamlines=False)
        streamlines = streamlines[grouping[(0, 1)]]

        density = utils.density_map(streamlines, affine, seed_mask_img.shape)
        density = scipy.ndimage.gaussian_filter(density.astype("float32"), 0.5)

        log_density = np.log10(density + 1)
        max_density = np.max(log_density)
        for i, t in enumerate(np.arange(0, max_density, max_density / 200)):
            nbr = str(i)
            nbr = nbr.zfill(3)
            mask = log_density >= t
            vol_filename = os.path.join(
                volume_folder, "vol" + nbr + "_t" + str(t) + ".nii.gz")
            nib.Nifti1Image(mask.astype("int32"), affine,
                            seed_mask_img.header).to_filename(vol_filename)
        shutil.make_archive(output_vumc_zip_file_path[:-4], 'zip',
                            volume_folder)

    ###################
    # Upload the data #
    ###################
    context.set_progress(message='Uploading results...')
    # context.upload_file(fa_file_path, 'fa.nii.gz')
    # context.upload_file(fod_file_path, 'fod.nii.gz')
    # context.upload_file(streamlines_file_path, 'streamlines.trk')
    if postprocessing in ["EPFL", "ALL"]:
        context.upload_file(output_epfl_zip_file_path,
                            'TrackyMcTrackface_EPFL_example.zip')
    if postprocessing in ["VUMC", "ALL"]:
        context.upload_file(output_vumc_zip_file_path,
                            'TrackyMcTrackface_VUMC_example.zip')
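
The EPFL/VUMC post-processing above thresholds the log of a smoothed
visitation-count map into 200 binary volumes. A minimal standalone
numpy/scipy sketch of that step, using a synthetic density map in place of
the utils.density_map output (the shape and Poisson counts are made up for
illustration):

import numpy as np
import scipy.ndimage

# synthetic visitation-count map standing in for utils.density_map output
density = np.random.poisson(2.0, size=(10, 10, 10)).astype("float32")
density = scipy.ndimage.gaussian_filter(density, 0.5)  # small smoothing

log_density = np.log10(density + 1)  # compress the dynamic range
max_density = np.max(log_density)

# one binary volume per threshold level -> ~200 operation points
masks = [log_density >= t
         for t in np.arange(0, max_density, max_density / 200)]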
Example #13
def test_horizon_flow():

    s1 = 10 * np.array([[0, 0, 0],
                        [1, 0, 0],
                        [2, 0, 0],
                        [3, 0, 0],
                        [4, 0, 0]], dtype='f8')

    s2 = 10 * np.array([[0, 0, 0],
                        [0, 1, 0],
                        [0, 2, 0],
                        [0, 3, 0],
                        [0, 4, 0]], dtype='f8')

    s3 = 10 * np.array([[0, 0, 0],
                        [1, 0.2, 0],
                        [2, 0.2, 0],
                        [3, 0.2, 0],
                        [4, 0.2, 0]], dtype='f8')

    print(s1.shape)
    print(s2.shape)
    print(s3.shape)

    streamlines = Streamlines()
    streamlines.append(s1)
    streamlines.append(s2)
    streamlines.append(s3)

    tractograms = [streamlines]
    images = None

    horizon(tractograms, images=images, cluster=True, cluster_thr=5,
            random_colors=False, length_lt=np.inf, length_gt=0,
            clusters_lt=np.inf, clusters_gt=0,
            world_coords=False, interactive=False)
    affine = np.diag([2., 1, 1, 1]).astype('f8')

    data = 255 * np.random.rand(150, 150, 150)

    images = [(data, affine)]

    horizon(tractograms, images=images, cluster=True, cluster_thr=5,
            random_colors=False, length_lt=np.inf, length_gt=0,
            clusters_lt=np.inf, clusters_gt=0,
            world_coords=True, interactive=False)

    with TemporaryDirectory() as out_dir:

        fimg = os.path.join(out_dir, 'test.nii.gz')
        ftrk = os.path.join(out_dir, 'test.trk')

        save_nifti(fimg, data, affine)
        save_tractogram(ftrk, streamlines, affine)

        input_files = [ftrk, fimg]

        npt.assert_equal(len(input_files), 2)

        hz_flow = HorizonFlow()

        hz_flow.run(input_files=input_files, stealth=True,
                    out_dir=out_dir, out_stealth_png='tmp_x.png')

        npt.assert_equal(os.path.exists(os.path.join(out_dir, 'tmp_x.png')),
                         True)
Example #14
def track(peaks,
          seed_image,
          max_nr_fibers=2000,
          smooth=None,
          compress=0.1,
          bundle_mask=None,
          start_mask=None,
          end_mask=None,
          tracking_uncertainties=None,
          dilation=0,
          next_step_displacement_std=0.15,
          nr_cpus=-1,
          verbose=True):
    """
    Great speedup was achieved by:
    - only seeding in bundle_mask instead of the entire image (seeding took
      very long)
    - calculating fiber length on the fly instead of using an extra function
      which has to iterate over the entire fiber a second time

    Args:
        peaks:
        seed_image:
        max_nr_fibers:
        smooth:
        compress:
        bundle_mask:
        start_mask:
        end_mask:
        dilation:
        nr_cpus:
        verbose:

    Returns:

    """
    import psutil

    peaks[:, :, :, 0] *= -1  # flip along the x axis so tracking works properly
    # Add +1 dilation for start and end mask to be more robust
    start_mask = binary_dilation(start_mask,
                                 iterations=dilation + 1).astype(np.uint8)
    end_mask = binary_dilation(end_mask,
                               iterations=dilation + 1).astype(np.uint8)
    if dilation > 0:
        bundle_mask = binary_dilation(bundle_mask,
                                      iterations=dilation).astype(np.uint8)

    if tracking_uncertainties is not None:
        tracking_uncertainties = img_utils.scale_to_range(
            tracking_uncertainties, range=(0, 1))

    global _PEAKS
    _PEAKS = peaks
    global _BUNDLE_MASK
    _BUNDLE_MASK = bundle_mask
    global _START_MASK
    _START_MASK = start_mask
    global _END_MASK
    _END_MASK = end_mask
    global _TRACKING_UNCERTAINTIES
    _TRACKING_UNCERTAINTIES = tracking_uncertainties

    # Get list of coordinates of each voxel in mask to seed from those
    mask_coords = np.array(np.where(bundle_mask == 1)).transpose()
    nr_voxels = mask_coords.shape[0]
    spacing = seed_image.header.get_zooms()[0]

    # max_nr_seeds = 250 * max_nr_fibers
    max_nr_seeds = 100 * max_nr_fibers  # after how many seeds to abort (to avoid endless runtime)
    # How many seeds to process in each pool.map iteration
    seeds_per_batch = 5000

    if nr_cpus == -1:
        nr_processes = psutil.cpu_count()
    else:
        nr_processes = nr_cpus

    streamlines = []
    fiber_ctr = 0
    seed_ctr = 0
    # Process seeds in batches so we can stop once we have reached the desired
    #   nr of streamlines. Not ideal; could be optimised by more
    #   multiprocessing fanciness.
    while fiber_ctr < max_nr_fibers:
        pool = multiprocessing.Pool(processes=nr_processes)
        streamlines_tmp = pool.map(
            partial(process_seedpoint,
                    next_step_displacement_std=next_step_displacement_std,
                    spacing=spacing),
            seed_generator(mask_coords, seeds_per_batch))
        # streamlines_tmp = [process_seedpoint(seed, spacing=spacing) for seed in
        #                    seed_generator(mask_coords, seeds_per_batch)] # single threaded for debug
        pool.close()
        pool.join()

        streamlines_tmp = [sl for sl in streamlines_tmp
                           if len(sl) > 0]  # filter empty
        streamlines += streamlines_tmp
        fiber_ctr = len(streamlines)
        if verbose:
            print("nr_fibs: {}".format(fiber_ctr))
        seed_ctr += seeds_per_batch
        if seed_ctr > max_nr_seeds:
            if verbose:
                print("Early stopping because max nr of seeds reached.")
            break

    if verbose:
        print("final nr streamlines: {}".format(len(streamlines)))

    # remove surplus of fibers (comes from multiprocessing)
    streamlines = streamlines[:max_nr_fibers]
    streamlines = Streamlines(streamlines)  # Generate streamlines object

    # Move from the convention "0mm is in voxel corner" to the convention
    # "0mm is in voxel center". Most toolkits use "0mm is in voxel center".
    streamlines = fiber_utils.add_to_each_streamline(streamlines, -0.5)

    # move streamlines to coordinate space
    #  This is doing: streamlines(coordinate_space) = affine * streamlines(voxel_space)
    streamlines = list(transform_streamlines(streamlines, seed_image.affine))

    # Smoothing does not change the overall results because it is only slight;
    # it just removes small unevenness.
    if smooth:
        streamlines = fiber_utils.smooth_streamlines(streamlines,
                                                     smoothing_factor=smooth)

    if compress:
        streamlines = fiber_utils.compress_streamlines(streamlines,
                                                       error_threshold=0.1,
                                                       nr_cpus=nr_cpus)

    return streamlines
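
A hedged usage sketch for track(). The file names and mask layout below are
assumptions for illustration; nib.load and get_fdata are standard nibabel
calls, and peaks is expected as a 4D array:

import nibabel as nib

# hypothetical inputs: a 4D peaks image and binary masks in the same space
peaks_img = nib.load('peaks.nii.gz')
seed_img = nib.load('seed_mask.nii.gz')
bundle_mask = nib.load('bundle_mask.nii.gz').get_fdata()
start_mask = nib.load('start_mask.nii.gz').get_fdata()
end_mask = nib.load('end_mask.nii.gz').get_fdata()

streamlines = track(peaks_img.get_fdata(), seed_img,
                    max_nr_fibers=2000,
                    bundle_mask=bundle_mask,
                    start_mask=start_mask,
                    end_mask=end_mask,
                    nr_cpus=4)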
Example #15
def read_bundles_2_subjects(subj_id='subj_1',
                            metrics=['fa'],
                            bundles=['af.left', 'cst.right', 'cc_1']):
    r"""Read images and streamlines from 2 subjects of the SNAIL dataset.

    Parameters
    ----------
    subj_id : string
        Either ``subj_1`` or ``subj_2``.
    metrics : list
        Either ['fa'] or ['t1'] or ['fa', 't1']
    bundles : list
        E.g., ['af.left', 'cst.right', 'cc_1']. See all the available bundles
        in the ``exp_bundles_maps/bundles_2_subjects`` directory of your
        ``$HOME/.dipy`` folder.

    Returns
    -------
    dix : dict
        Dictionary with data of the metrics and the bundles as keys.

    Notes
    -----
    If you are using these datasets, please cite the following publications.

    References
    ----------
    .. [1] Renauld, E., M. Descoteaux, M. Bernier, E. Garyfallidis,
           K. Whittingstall, "Morphology of thalamus, LGN and optic radiation
           do not influence EEG alpha waves", PLoS One (under submission),
           2015.

    .. [2] Garyfallidis, E., O. Ocegueda, D. Wassermann, M. Descoteaux,
           "Robust and efficient linear registration of fascicles in the
           space of streamlines", Neuroimage, 117:124-140, 2015.

    """
    dname = pjoin(dipy_home, 'exp_bundles_and_maps', 'bundles_2_subjects')

    from dipy.io.streamline import load_tractogram
    from dipy.tracking.streamline import Streamlines

    res = {}

    if 't1' in metrics:
        data, affine = load_nifti(pjoin(dname, subj_id, 't1_warped.nii.gz'))
        res['t1'] = data

    if 'fa' in metrics:
        fa, affine = load_nifti(pjoin(dname, subj_id, 'fa_1x1x1.nii.gz'))
        res['fa'] = fa

    res['affine'] = affine

    for bun in bundles:

        streams = load_tractogram(pjoin(dname, subj_id, 'bundles',
                                        'bundles_' + bun + '.trk'),
                                  'same',
                                  bbox_valid_check=False).streamlines

        streamlines = Streamlines(streams)
        res[bun] = streamlines

    return res
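
Typical usage of the reader above, matching its docstring (a sketch; the data
must already be present under $HOME/.dipy):

res = read_bundles_2_subjects(subj_id='subj_1', metrics=['fa'],
                              bundles=['af.left'])
fa, affine = res['fa'], res['affine']  # FA volume and its affine
bundle = res['af.left']                # a Streamlines object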
Example #16
                                              step_size=step_size,
                                              average_voxel_size=voxel_size)

# Particle Filtering Tractography
pft_streamline_generator = ParticleFilteringTracking(dg,
                                                     cmc_classifier,
                                                     seeds,
                                                     affine,
                                                     max_cross=1,
                                                     step_size=step_size,
                                                     maxlen=1000,
                                                     pft_back_tracking_dist=2,
                                                     pft_front_tracking_dist=1,
                                                     particle_count=15,
                                                     return_all=False)
streamlines = Streamlines(pft_streamline_generator)
save_trk("tractogram_pft.trk", streamlines, affine, shape)

if has_fury:
    r = window.Renderer()
    r.add(actor.line(streamlines, colormap.line_colors(streamlines)))
    window.record(r, out_path='tractogram_pft.png', size=(800, 800))
    if interactive:
        window.show(r)
"""
.. figure:: tractogram_pft.png
 :align: center

 **Corpus Callosum using particle filtering tractography**
"""
Example #17
                             data,
                             default_sphere,
                             relative_peak_threshold=.8,
                             min_separation_angle=45,
                             mask=white_matter)

classifier = ThresholdStoppingCriterion(csa_peaks.gfa, .25)

seed_mask = labels == 2
seeds = utils.seeds_from_mask(seed_mask, density=[1, 1, 1], affine=affine)

# Initialization of LocalTracking. The computation happens in the next step.
streamlines = LocalTracking(csa_peaks, classifier, seeds, affine, step_size=2)

# Compute streamlines and store as a list.
streamlines = Streamlines(streamlines)

###############################################################################
# We will create a streamline actor from the streamlines.

streamlines_actor = actor.line(streamlines, line_colors(streamlines))

###############################################################################
# Next, we create a surface actor from the corpus callosum seed ROI. We
# provide the ROI data, the affine, the color in [R,G,B], and the opacity as
# a decimal between zero and one. Here, we set the color as blue/green with
# 50% opacity.

surface_opacity = 0.5
surface_color = [0, 1, 1]
Example #18
def test_gaussian_weights():
    # Some bogus x,y,z coordinates
    x = np.arange(10).astype(float)
    y = np.arange(10).astype(float)
    z = np.arange(10).astype(float)

    # Create a distribution for which we can predict the weights we would
    # expect to get:
    bundle = Streamlines([np.array([x, y, z]).T + 1,
                          np.array([x, y, z]).T - 1])
    # In this case, all nodes receive an equal weight of 0.5:
    w = gaussian_weights(bundle, n_points=10)
    npt.assert_almost_equal(w, np.ones((len(bundle), 10)) * 0.5)

    # Test when asked to return the Mahalanobis distance instead of weights
    w = gaussian_weights(bundle, n_points=10, return_mahalnobis=True)
    npt.assert_almost_equal(w, np.ones((len(bundle), 10)))

    # Here, some nodes are twice as far from the mean as others
    bundle = Streamlines([np.array([x, y, z]).T + 2,
                          np.array([x, y, z]).T + 1,
                          np.array([x, y, z]).T - 1,
                          np.array([x, y, z]).T - 2])
    w = gaussian_weights(bundle, n_points=10)

    # And their weights should be halved:
    npt.assert_almost_equal(w[0], w[1] / 2)
    npt.assert_almost_equal(w[-1], w[2] / 2)

    # Test the situation where all the streamlines have an identical node:
    arr1 = np.array([x, y, z]).T + 2
    arr2 = np.array([x, y, z]).T + 1
    arr3 = np.array([x, y, z]).T - 1
    arr4 = np.array([x, y, z]).T - 2

    arr1[0] = np.array([1, 1, 1])
    arr2[0] = np.array([1, 1, 1])
    arr3[0] = np.array([1, 1, 1])
    arr4[0] = np.array([1, 1, 1])

    bundle_w_id_node = Streamlines([arr1, arr2, arr3, arr4])
    w = gaussian_weights(Streamlines(bundle_w_id_node), n_points=10)
    # For this case, the result should be a weight of 1/n_streamlines in that
    # node for all streamlines:
    npt.assert_equal(w[:, 0],
                     np.ones(len(bundle_w_id_node)) * 1/len(bundle_w_id_node))

    # Test the situation where all the streamlines are copies of each other:
    bundle_w_copies = Streamlines([bundle[0], bundle[0], bundle[0], bundle[0]])
    w = gaussian_weights(bundle_w_copies, n_points=10)
    # In this case, the entire array should be equal to 1/n_streamlines:
    npt.assert_equal(w,
                     np.ones(w.shape) * 1/len(bundle_w_id_node))

    # Test with bundle of length 1:
    bundle_len_1 = Streamlines([bundle[0]])
    w = gaussian_weights(bundle_len_1, n_points=10)
    npt.assert_equal(w, np.ones(w.shape))

    bundle_len_1 = Streamlines([bundle[0]])
    w = gaussian_weights(bundle_len_1, n_points=10, return_mahalnobis=True)
    npt.assert_equal(w, np.ones(w.shape) * np.nan)
Example #19
def test_orient_by_rois():
    streamlines = Streamlines([
        np.array([[0, 0., 0], [1, 0., 0.], [2, 0., 0.]]),
        np.array([[2, 0., 0.], [1, 0., 0], [0, 0, 0.]])
    ])

    # Make two ROIs:
    mask1_vol = np.zeros((4, 4, 4), dtype=bool)
    mask2_vol = np.zeros_like(mask1_vol)
    mask1_vol[0, 0, 0] = True
    mask2_vol[1, 0, 0] = True
    mask1_coords = np.array(np.where(mask1_vol)).T
    mask2_coords = np.array(np.where(mask2_vol)).T

    # If there is an affine, we'll use it:
    affine = np.eye(4)
    affine[:, 3] = [-1, 100, -20, 1]
    # Transform the streamlines:
    x_streamlines = Streamlines([sl + affine[:3, 3] for sl in streamlines])

    # After reorientation, this should be the answer:
    flipped_sl = Streamlines([streamlines[0], streamlines[1][::-1]])
    new_streamlines = orient_by_rois(streamlines,
                                     mask1_vol,
                                     mask2_vol,
                                     in_place=False,
                                     affine=None,
                                     as_generator=False)
    npt.assert_array_equal(new_streamlines, flipped_sl)

    npt.assert_(new_streamlines is not streamlines)

    # Test with affine:
    x_flipped_sl = Streamlines([s + affine[:3, 3] for s in flipped_sl])
    new_streamlines = orient_by_rois(x_streamlines,
                                     mask1_vol,
                                     mask2_vol,
                                     in_place=False,
                                     affine=affine,
                                     as_generator=False)
    npt.assert_array_equal(new_streamlines, x_flipped_sl)
    npt.assert_(new_streamlines is not x_streamlines)

    # Test providing coord ROIs instead of vol ROIs:
    new_streamlines = orient_by_rois(x_streamlines,
                                     mask1_coords,
                                     mask2_coords,
                                     in_place=False,
                                     affine=affine,
                                     as_generator=False)
    npt.assert_array_equal(new_streamlines, x_flipped_sl)

    # Test with as_generator set to True
    new_streamlines = orient_by_rois(streamlines,
                                     mask1_vol,
                                     mask2_vol,
                                     in_place=False,
                                     affine=None,
                                     as_generator=True)

    npt.assert_(isinstance(new_streamlines, types.GeneratorType))
    ll = Streamlines(new_streamlines)
    npt.assert_array_equal(ll, flipped_sl)

    # Test with as_generator set to True and with the affine
    new_streamlines = orient_by_rois(x_streamlines,
                                     mask1_vol,
                                     mask2_vol,
                                     in_place=False,
                                     affine=affine,
                                     as_generator=True)

    npt.assert_(isinstance(new_streamlines, types.GeneratorType))
    ll = Streamlines(new_streamlines)
    npt.assert_array_equal(ll, x_flipped_sl)

    # Test with generator input:
    new_streamlines = orient_by_rois(generate_sl(streamlines),
                                     mask1_vol,
                                     mask2_vol,
                                     in_place=False,
                                     affine=None,
                                     as_generator=True)

    npt.assert_(isinstance(new_streamlines, types.GeneratorType))
    ll = Streamlines(new_streamlines)
    npt.assert_array_equal(ll, flipped_sl)

    # Generator output cannot take a True `in_place` kwarg:
    npt.assert_raises(ValueError, orient_by_rois,
                      *[generate_sl(streamlines), mask1_vol, mask2_vol],
                      **dict(in_place=True, affine=None, as_generator=True))

    # But you can input a generator and get a non-generator as output:
    new_streamlines = orient_by_rois(generate_sl(streamlines),
                                     mask1_vol,
                                     mask2_vol,
                                     in_place=False,
                                     affine=None,
                                     as_generator=False)

    npt.assert_(not isinstance(new_streamlines, types.GeneratorType))
    npt.assert_array_equal(new_streamlines, flipped_sl)

    # Modify in-place:
    new_streamlines = orient_by_rois(streamlines,
                                     mask1_vol,
                                     mask2_vol,
                                     in_place=True,
                                     affine=None,
                                     as_generator=False)

    npt.assert_array_equal(new_streamlines, flipped_sl)
    # The two objects are one and the same:
    npt.assert_(new_streamlines is streamlines)
Example #20
def test_length():
    # Test length of only one streamline
    length_streamline_cython = length(streamline)
    length_streamline_python = length_python(streamline)
    assert_almost_equal(length_streamline_cython, length_streamline_python)

    length_streamline_cython = length(streamline_64bit)
    length_streamline_python = length_python(streamline_64bit)
    assert_almost_equal(length_streamline_cython, length_streamline_python)

    # Test computing length of multiple streamlines of different nb_points
    length_streamlines_cython = length(streamlines)

    for i, s in enumerate(streamlines):
        length_streamline_python = length_python(s)
        assert_array_almost_equal(length_streamlines_cython[i],
                                  length_streamline_python)

    length_streamlines_cython = length(streamlines_64bit)

    for i, s in enumerate(streamlines_64bit):
        length_streamline_python = length_python(s)
        assert_array_almost_equal(length_streamlines_cython[i],
                                  length_streamline_python)

    # ArraySequence
    # Test length of only one streamline
    length_streamline_cython = length(streamline_64bit)
    length_streamline_arrseq = length(Streamlines([streamline]))
    assert_almost_equal(length_streamline_arrseq, length_streamline_cython)

    length_streamline_cython = length(streamline_64bit)
    length_streamline_arrseq = length(Streamlines([streamline_64bit]))
    assert_almost_equal(length_streamline_arrseq, length_streamline_cython)

    # Test computing length of multiple streamlines of different nb_points
    length_streamlines_cython = length(streamlines)
    length_streamlines_arrseq = length(Streamlines(streamlines))
    assert_array_almost_equal(length_streamlines_arrseq,
                              length_streamlines_cython)

    length_streamlines_cython = length(streamlines_64bit)
    length_streamlines_arrseq = length(Streamlines(streamlines_64bit))
    assert_array_almost_equal(length_streamlines_arrseq,
                              length_streamlines_cython)

    # Test on a sliced ArraySequence
    length_streamlines_cython = length(streamlines_64bit[::2])
    length_streamlines_arrseq = length(Streamlines(streamlines_64bit)[::2])
    assert_array_almost_equal(length_streamlines_arrseq,
                              length_streamlines_cython)
    length_streamlines_cython = length(streamlines[::-1])
    length_streamlines_arrseq = length(Streamlines(streamlines)[::-1])
    assert_array_almost_equal(length_streamlines_arrseq,
                              length_streamlines_cython)

    # Test streamlines having mixed dtype
    streamlines_mixed_dtype = [
        streamline,
        streamline.astype(np.float64),
        streamline.astype(np.int32),
        streamline.astype(np.int64)
    ]
    lengths_mixed_dtype = [length(s) for s in streamlines_mixed_dtype]
    assert_array_equal(length(streamlines_mixed_dtype), lengths_mixed_dtype)

    # Test streamlines with different shape
    length_streamlines_cython = length(heterogeneous_streamlines)

    for i, s in enumerate(heterogeneous_streamlines):
        length_streamline_python = length_python(s)
        assert_array_almost_equal(length_streamlines_cython[i],
                                  length_streamline_python)

    # Test streamline having integer dtype
    length_streamline = length(streamline.astype('int'))
    assert_equal(length_streamline.dtype, np.float64)

    # Test empty list
    assert_equal(length([]), 0.0)

    # Test streamline having only one point
    assert_equal(length(np.array([[1, 2, 3]])), 0.0)

    # We do not support lists of lists; it should be a numpy ndarray.
    streamline_unsupported = [[1, 2, 3], [4, 5, 5], [2, 1, 3], [4, 2, 1]]
    assert_raises(AttributeError, length, streamline_unsupported)

    # Test computing the length of numpy arrays with flag WRITABLE=False
    streamlines_readonly = []
    for s in streamlines:
        streamlines_readonly.append(s.copy())
        streamlines_readonly[-1].setflags(write=False)

    assert_array_almost_equal(length(streamlines_readonly),
                              [length_python(s) for s in streamlines_readonly])
    streamlines_readonly = []
    for s in streamlines_64bit:
        streamlines_readonly.append(s.copy())
        streamlines_readonly[-1].setflags(write=False)

    assert_array_almost_equal(length(streamlines_readonly),
                              [length_python(s) for s in streamlines_readonly])
Example #21
def test_set_number_of_points():
    # Test resampling of only one streamline
    nb_points = 12
    new_streamline_cython = set_number_of_points(streamline, nb_points)
    new_streamline_python = set_number_of_points_python(streamline, nb_points)
    assert_equal(len(new_streamline_cython), nb_points)
    # Using 5-digit precision because the streamline is in float32.
    assert_array_almost_equal(new_streamline_cython, new_streamline_python, 5)

    new_streamline_cython = set_number_of_points(streamline_64bit, nb_points)
    new_streamline_python = set_number_of_points_python(
        streamline_64bit, nb_points)
    assert_equal(len(new_streamline_cython), nb_points)
    assert_array_almost_equal(new_streamline_cython, new_streamline_python)

    res = []
    simple_streamline = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]], 'f4')
    for nb_points in range(2, 200):
        new_streamline_cython = set_number_of_points(simple_streamline,
                                                     nb_points)
        res.append(nb_points - len(new_streamline_cython))
    assert_equal(np.sum(res), 0)

    # Test resampling of multiple streamlines of different nb_points
    nb_points = 12
    new_streamlines_cython = set_number_of_points(streamlines, nb_points)

    for i, s in enumerate(streamlines):
        new_streamline_python = set_number_of_points_python(s, nb_points)
        # Using 5-digit precision because the streamline is in float32.
        assert_array_almost_equal(new_streamlines_cython[i],
                                  new_streamline_python, 5)

    # ArraySequence
    arrseq = Streamlines(streamlines)
    new_streamlines_as_seq_cython = set_number_of_points(arrseq, nb_points)
    assert_array_almost_equal(new_streamlines_as_seq_cython,
                              new_streamlines_cython)

    new_streamlines_cython = set_number_of_points(streamlines_64bit, nb_points)

    for i, s in enumerate(streamlines_64bit):
        new_streamline_python = set_number_of_points_python(s, nb_points)
        assert_array_almost_equal(new_streamlines_cython[i],
                                  new_streamline_python)

    # ArraySequence
    arrseq = Streamlines(streamlines_64bit)
    new_streamlines_as_seq_cython = set_number_of_points(arrseq, nb_points)
    assert_array_almost_equal(new_streamlines_as_seq_cython,
                              new_streamlines_cython)

    # Test streamlines with mixed dtype
    streamlines_mixed_dtype = [
        streamline,
        streamline.astype(np.float64),
        streamline.astype(np.int32),
        streamline.astype(np.int64)
    ]
    nb_points_mixed_dtype = [
        len(s)
        for s in set_number_of_points(streamlines_mixed_dtype, nb_points)
    ]
    assert_array_equal(nb_points_mixed_dtype,
                       [nb_points] * len(streamlines_mixed_dtype))

    # Test streamlines with different shape
    new_streamlines_cython = set_number_of_points(heterogeneous_streamlines,
                                                  nb_points)

    for i, s in enumerate(heterogeneous_streamlines):
        new_streamline_python = set_number_of_points_python(s, nb_points)
        assert_array_almost_equal(new_streamlines_cython[i],
                                  new_streamline_python)

    # Test streamline with integer dtype
    new_streamline = set_number_of_points(streamline.astype(np.int32))
    assert_equal(new_streamline.dtype, np.float32)
    new_streamline = set_number_of_points(streamline.astype(np.int64))
    assert_equal(new_streamline.dtype, np.float64)

    # Test empty list
    assert_equal(set_number_of_points([]), [])

    # Test streamline having only one point
    assert_raises(ValueError, set_number_of_points, np.array([[1, 2, 3]]))

    # We do not support lists of lists; it should be a numpy ndarray.
    streamline_unsupported = [[1, 2, 3], [4, 5, 5], [2, 1, 3], [4, 2, 1]]
    assert_raises(AttributeError, set_number_of_points, streamline_unsupported)

    # Test setting the number of points of a numpy array with WRITABLE=False
    streamline_readonly = streamline.copy()
    streamline_readonly.setflags(write=False)
    assert_equal(len(set_number_of_points(streamline_readonly, nb_points=42)),
                 42)

    # Test setting the number of points of numpy arrays with WRITABLE=False
    streamlines_readonly = []
    for s in streamlines:
        streamlines_readonly.append(s.copy())
        streamlines_readonly[-1].setflags(write=False)

    assert_equal(len(set_number_of_points(streamlines_readonly, nb_points=42)),
                 len(streamlines_readonly))

    streamlines_readonly = []
    for s in streamlines_64bit:
        streamlines_readonly.append(s.copy())
        streamlines_readonly[-1].setflags(write=False)

    assert_equal(len(set_number_of_points(streamlines_readonly, nb_points=42)),
                 len(streamlines_readonly))

    # Test if nb_points is less than 2
    assert_raises(ValueError,
                  set_number_of_points, [np.ones(
                      (10, 3)), np.ones((10, 3))],
                  nb_points=1)
Example #22
def save_roisubset(streamlines, roislist, roisexcel, labelmask, stringstep,
                   ratios, trkpath, subject, affine, header):
    
    #atlas_legends = BIGGUS_DISKUS + "/atlases/CHASSSYMM3AtlasLegends.xlsx"
    
    df = pd.read_excel(roisexcel, sheet_name='Sheet1')
    df['Structure'] = df['Structure'].str.lower()    
    
    for rois in roislist:
        if len(rois) == 1:
            roiname = "_" + rois[0] + "_"
        elif len(rois) > 1:
            roiname = "_"
            for roi in rois:
                roiname = roiname + roi[0:4]
            roiname = roiname + "_"
            
        labelslist = []  # fimbria

        for roi in rois:
            rslt_df = df.loc[df['Structure'] == roi.lower()]
            if roi.lower() == "wholebrain" or roi.lower() == "brain":
                labelslist=None
            else:
                labelslist = np.concatenate((labelslist,
                                             np.array(rslt_df.index2)))

        if (isempty(labelslist) and roi.lower() != "wholebrain"
                and roi.lower() != "brain"):
            txt = ("Warning: Unrecognized roi, will take whole brain as "
                   "ROI. The roi specified was: " + roi)
            print(txt)

    
        if isempty(labelslist):
            roimask = np.where(labelmask == 0, False, True)
        else:
            if labelmask is None:
                raise ValueError("Bad label data, could not define ROI for streams")
            roimask = np.zeros(np.shape(labelmask), dtype=int)
            for label in labelslist:
                roimask = roimask + (labelmask == label)
        
        if not isempty(labelslist):
            trkroipath = trkpath + '/' + subject + roiname + "_stepsize_" + stringstep + '.trk'
            if not os.path.exists(trkroipath):
                affinetemp = np.eye(4)
                trkstreamlines = target(streamlines, affinetemp, roimask, include=True, strict="longstring")
                trkstreamlines = Streamlines(trkstreamlines)
                myheader = create_tractogram_header(trkroipath, *header)
                roi_sl = lambda: (s for s in trkstreamlines)
                save_trk_heavy_duty(trkroipath, streamlines=roi_sl,
                            affine=affine, header=myheader)
            else:
                trkdata = load_trk(trkroipath, 'same')
                trkdata.to_vox()
                if hasattr(trkdata, 'space_attribute'):
                    header = trkdata.space_attribute
                elif hasattr(trkdata, 'space_attributes'):
                    header = trkdata.space_attributes
                trkstreamlines = trkdata.streamlines
                
        for ratio in ratios:
            if ratio != 1:
                trkroiminipath = trkpath + '/' + subject + '_ratio_' + str(ratio) + roiname + "_stepsize_" + stringstep + '.trk'
                if not os.path.exists(trkroiminipath):
                    ministream = []
                    for idx, stream in enumerate(trkstreamlines):
                        if (idx % ratio) == 0:
                            ministream.append(stream)
                    trkstreamlines = ministream
                    myheader = create_tractogram_header(trkroiminipath, *header)
                    ratioed_roi_sl_gen = lambda: (s for s in trkstreamlines)
                    if allsave:
                        save_trk_heavy_duty(trkroiminipath, streamlines=ratioed_roi_sl_gen,
                                            affine=affine, header=myheader)
                else:
                    trkdata = load_trk(trkroiminipath, 'same')
                    trkdata.to_vox()
                    if hasattr(trkdata, 'space_attribute'):
                        header = trkdata.space_attribute
                    elif hasattr(trkdata, 'space_attributes'):
                        header = trkdata.space_attributes
                    trkstreamlines = trkdata.streamlines
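
# A minimal standalone sketch of the ratio-based subsampling used above,
# with a hypothetical helper name (keeps every `ratio`-th streamline):
def subsample_streamlines(streamlines, ratio):
    return [s for idx, s in enumerate(streamlines) if idx % ratio == 0]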
Example #23
tensor_model = dti.TensorModel(gtab)
tenfit = tensor_model.fit(data, mask=white_matter)

FA = fractional_anisotropy(tenfit.evals)
classifier = ThresholdTissueClassifier(FA, .2)

from dipy.data import default_sphere
from dipy.direction import DeterministicMaximumDirectionGetter
from dipy.io.streamline import save_trk, load_trk

detmax_dg = DeterministicMaximumDirectionGetter.from_shcoeff(
    csd_fit.shm_coeff, max_angle=40., sphere=default_sphere)
from dipy.tracking.streamline import Streamlines

streamlines = Streamlines(
    LocalTracking(detmax_dg, classifier, seeds, affine, step_size=.1))

long_streamlines = np.ones(len(streamlines), bool)
for i in range(len(streamlines)):
    if streamlines[i].shape[0] < 70:
        long_streamlines[i] = False
streamlines = streamlines[long_streamlines]

from dipy.viz import window, actor, colormap as cmap

streamlines_actor = actor.line(streamlines, cmap.line_colors(streamlines))

# weighted streamlines:
from dipy.tracking.streamline import transform_streamlines
''' FA weighting:
fa_name = r'20190211_134016ep2dd155D60MB3APs004a001_FA.nii'
Example #24
def test_values_from_volume():
    decimal = 4
    data3d = np.arange(2000).reshape(20, 10, 10)
    # Test two cases of 4D data (handled differently)
    # One where the last dimension is length 3:
    data4d_3vec = np.arange(6000).reshape(20, 10, 10, 3)
    # The other where the last dimension is not 3:
    data4d_2vec = np.arange(4000).reshape(20, 10, 10, 2)
    for dt in [np.float32, np.float64]:
        for data in [data3d, data4d_3vec, data4d_2vec]:
            sl1 = [np.array([[1, 0, 0],
                             [1.5, 0, 0],
                             [2, 0, 0],
                             [2.5, 0, 0]]).astype(dt),
                   np.array([[2, 0, 0],
                             [3.1, 0, 0],
                             [3.9, 0, 0],
                             [4.1, 0, 0]]).astype(dt)]

            ans1 = [[data[1, 0, 0],
                     data[1, 0, 0] + (data[2, 0, 0] - data[1, 0, 0]) / 2,
                     data[2, 0, 0],
                     data[2, 0, 0] + (data[3, 0, 0] - data[2, 0, 0]) / 2],
                    [data[2, 0, 0],
                     data[3, 0, 0] + (data[4, 0, 0] - data[3, 0, 0]) * 0.1,
                     data[3, 0, 0] + (data[4, 0, 0] - data[3, 0, 0]) * 0.9,
                     data[4, 0, 0] + (data[5, 0, 0] - data[4, 0, 0]) * 0.1]]

            vv = values_from_volume(data, sl1)
            npt.assert_almost_equal(vv, ans1, decimal=decimal)

            vv = values_from_volume(data, np.array(sl1))
            npt.assert_almost_equal(vv, ans1, decimal=decimal)

            vv = values_from_volume(data, Streamlines(sl1))
            npt.assert_almost_equal(vv, ans1, decimal=decimal)

            affine = np.eye(4)
            affine[:, 3] = [-100, 10, 1, 1]
            x_sl1 = ut.move_streamlines(sl1, affine)
            x_sl2 = ut.move_streamlines(sl1, affine)

            vv = values_from_volume(data, x_sl1, affine=affine)
            npt.assert_almost_equal(vv, ans1, decimal=decimal)

            # The generator has already been consumed so needs to be
            # regenerated:
            x_sl1 = list(ut.move_streamlines(sl1, affine))
            vv = values_from_volume(data, x_sl1, affine=affine)
            npt.assert_almost_equal(vv, ans1, decimal=decimal)

            # Test that the streamlines haven't mutated:
            l_sl2 = list(x_sl2)
            npt.assert_equal(x_sl1, l_sl2)

            vv = values_from_volume(data, np.array(x_sl1), affine=affine)
            npt.assert_almost_equal(vv, ans1, decimal=decimal)
            npt.assert_equal(np.array(x_sl1), np.array(l_sl2))

            # Test for lists of streamlines with different numbers of nodes:
            sl2 = [sl1[0][:-1], sl1[1]]
            ans2 = [ans1[0][:-1], ans1[1]]
            vv = values_from_volume(data, sl2)
            for ii, v in enumerate(vv):
                npt.assert_almost_equal(v, ans2[ii], decimal=decimal)

    # We raise an error if the streamlines fed don't make sense. In this
    # case, a tuple instead of a list, generator or array
    nonsense_sl = (np.array([[1, 0, 0], [1.5, 0, 0], [2, 0, 0], [2.5, 0, 0]]),
                   np.array([[2, 0, 0], [3.1, 0, 0], [3.9, 0, 0], [4.1, 0, 0]]))

    npt.assert_raises(RuntimeError, values_from_volume, data, nonsense_sl)

    # For some use-cases we might have singleton streamlines (with only one
    # node each):
    data3D = np.ones((2, 2, 2))
    streamlines = np.ones((10, 1, 3))
    npt.assert_equal(values_from_volume(data3D, streamlines).shape, (10, 1))
    data4D = np.ones((2, 2, 2, 2))
    streamlines = np.ones((10, 1, 3))
    npt.assert_equal(values_from_volume(data4D, streamlines).shape, (10, 1, 2))
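
# A short usage sketch of values_from_volume (values are interpolated along
# each streamline; the affine maps streamline points to voxel space):
import numpy as np
from dipy.tracking.streamline import Streamlines, values_from_volume

vol = np.arange(27, dtype=float).reshape(3, 3, 3)
sls = Streamlines([np.array([[0., 0., 0.], [1., 1., 1.], [2., 2., 2.]])])
vals = values_from_volume(vol, sls, affine=np.eye(4))
print(vals)  # [[0.0, 13.0, 26.0]]: one list of sampled values per streamline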
Example #25
import numpy as np
import nibabel as nib
from numpy.testing import (assert_equal,
                           assert_almost_equal,
                           run_module_suite)
from dipy.data import get_fnames
from dipy.segment.bundles import RecoBundles
from dipy.tracking.distances import bundles_distances_mam
from dipy.tracking.streamline import Streamlines
from dipy.segment.clustering import qbx_and_merge


streams, hdr = nib.trackvis.read(get_fnames('fornix'))
fornix = [s[0] for s in streams]

f = Streamlines(fornix)
f1 = f.copy()

f2 = f1[:20].copy()
f2._data += np.array([50, 0, 0])

f3 = f1[200:].copy()
f3._data += np.array([100, 0, 0])

f.extend(f2)
f.extend(f3)


def test_rb_check_defaults():

    rb = RecoBundles(f, greater_than=0, clust_thr=10)
Example #26
def plot_bundles_with_metric(bundle_path,
                             endings_path,
                             brain_mask_path,
                             bundle,
                             metrics,
                             output_path,
                             tracking_format="trk_legacy",
                             show_color_bar=True):
    import seaborn as sns  # imported inside the function so the module still works if seaborn is missing (only this function needs it)
    from dipy.viz import actor, window
    from tractseg.libs import vtk_utils

    def _add_extra_point_to_last_streamline(sl):
        # Coloring breaks as soon as all streamlines have the same number of
        # points (cause unknown), so give the last streamline one extra point.
        sl[-1] = np.append(sl[-1], [sl[-1][-1]], axis=0)
        return sl

    # Settings
    NR_SEGMENTS = 100
    ANTI_INTERPOL_MULT = 1  # increase number of points to avoid interpolation to blur the colors
    algorithm = "distance_map"  # equal_dist | distance_map | cutting_plane
    # colors = np.array(sns.color_palette("coolwarm", NR_SEGMENTS))  # colormap blue to red (does not fit to colorbar)
    colors = np.array(sns.light_palette(
        "red", NR_SEGMENTS))  # colormap only red, which fits to color_bar
    img_size = (1000, 1000)

    # Tractometry skips the first and last element, leaving 98 instead of 100
    # elements; duplicate the first and last element to get back to 100.
    metrics = list(metrics)
    metrics = np.array([metrics[0]] + metrics + [metrics[-1]])

    metrics_max = metrics.max()
    metrics_min = metrics.min()
    if metrics_max == metrics_min:
        metrics = np.zeros(len(metrics))
    else:
        metrics = img_utils.scale_to_range(
            metrics,
            range=(0, 99))  # range needs to be same as segments in colormap

    orientation = dataset_specific_utils.get_optimal_orientation_for_bundle(
        bundle)

    # Load mask
    beginnings_img = nib.load(endings_path)
    beginnings = beginnings_img.get_fdata().astype(np.uint8)
    beginnings = binary_dilation(beginnings)

    # Load trackings
    if tracking_format == "trk_legacy":
        streams, hdr = trackvis.read(bundle_path)
        streamlines = [s[0] for s in streams]
    else:
        sl_file = nib.streamlines.load(bundle_path)
        streamlines = sl_file.streamlines

    # Reduce streamline count
    streamlines = streamlines[::2]

    # Reorder to make all streamlines have same start region
    streamlines = fiber_utils.add_to_each_streamline(streamlines, 0.5)
    streamlines_new = []
    for idx, sl in enumerate(streamlines):
        startpoint = sl[0]
        # Flip streamline if not in right order
        if beginnings[int(startpoint[0]),
                      int(startpoint[1]),
                      int(startpoint[2])] == 0:
            sl = sl[::-1, :]
        streamlines_new.append(sl)
    streamlines = fiber_utils.add_to_each_streamline(streamlines_new, -0.5)

    if algorithm == "distance_map" or algorithm == "equal_dist":
        streamlines = fiber_utils.resample_fibers(
            streamlines, NR_SEGMENTS * ANTI_INTERPOL_MULT)
    elif algorithm == "cutting_plane":
        streamlines = fiber_utils.resample_to_same_distance(
            streamlines,
            max_nr_points=NR_SEGMENTS,
            ANTI_INTERPOL_MULT=ANTI_INTERPOL_MULT)

    # Cut start and end by percentage
    # streamlines = FiberUtils.resample_fibers(streamlines, NR_SEGMENTS * ANTI_INTERPOL_MULT)
    # remove = int((NR_SEGMENTS * ANTI_INTERPOL_MULT) * 0.15)  # remove X% in beginning and end
    # streamlines = np.array(streamlines)[:, remove:-remove, :]
    # streamlines = list(streamlines)

    if algorithm == "equal_dist":
        segment_idxs = []
        for i in range(len(streamlines)):
            segment_idxs.append(list(range(NR_SEGMENTS * ANTI_INTERPOL_MULT)))
        segment_idxs = np.array(segment_idxs)

    elif algorithm == "distance_map":
        metric = AveragePointwiseEuclideanMetric()
        qb = QuickBundles(threshold=100., metric=metric)
        clusters = qb.cluster(streamlines)
        centroids = Streamlines(clusters.centroids)
        _, segment_idxs = cKDTree(centroids.data, 1,
                                  copy_data=True).query(streamlines, k=1)

    elif algorithm == "cutting_plane":
        streamlines_resamp = fiber_utils.resample_fibers(
            streamlines, NR_SEGMENTS * ANTI_INTERPOL_MULT)
        metric = AveragePointwiseEuclideanMetric()
        qb = QuickBundles(threshold=100., metric=metric)
        clusters = qb.cluster(streamlines_resamp)
        centroid = Streamlines(clusters.centroids)[0]
        # index of the middle cluster
        middle_idx = int(NR_SEGMENTS / 2) * ANTI_INTERPOL_MULT
        middle_point = centroid[middle_idx]
        segment_idxs = fiber_utils.get_idxs_of_closest_points(
            streamlines, middle_point)
        # Align along the middle and assign indices
        segment_idxs_eqlen = []
        for idx, sl in enumerate(streamlines):
            sl_middle_pos = segment_idxs[idx]
            before_elems = sl_middle_pos
            after_elems = len(sl) - sl_middle_pos
            base_idx = 1000  # use higher index to avoid negative numbers for area below middle
            r = range((base_idx - before_elems), (base_idx + after_elems))
            segment_idxs_eqlen.append(r)
        segment_idxs = segment_idxs_eqlen

    # Add an extra point, otherwise the coloring bug described above is triggered
    streamlines = _add_extra_point_to_last_streamline(streamlines)

    renderer = window.Renderer()
    colors_all = []  # final shape will be [nr_streamlines, nr_points, 3]
    for jdx, sl in enumerate(streamlines):
        colors_sl = []
        for idx, p in enumerate(sl):
            if idx >= len(segment_idxs[jdx]):
                seg_idx = segment_idxs[jdx][idx - 1]
            else:
                seg_idx = segment_idxs[jdx][idx]

            m = metrics[int(seg_idx / ANTI_INTERPOL_MULT)]
            color = colors[int(m)]
            colors_sl.append(color)
        colors_all.append(colors_sl)  # cannot be a numpy array: the last streamline has one extra point

    sl_actor = actor.streamtube(streamlines,
                                colors=colors_all,
                                linewidth=0.2,
                                opacity=1)
    renderer.add(sl_actor)

    # plot brain mask
    mask = nib.load(brain_mask_path).get_fdata().astype(np.uint8)
    cont_actor = vtk_utils.contour_from_roi_smooth(
        mask,
        affine=beginnings_img.affine,
        color=[.9, .9, .9],
        opacity=.2,
        smoothing=50)
    renderer.add(cont_actor)

    if show_color_bar:
        lut_cmap = actor.colormap_lookup_table(scale_range=(metrics_min,
                                                            metrics_max),
                                               hue_range=(0.0, 0.0),
                                               saturation_range=(0.0, 1.0))
        renderer.add(actor.scalar_bar(lut_cmap))

    if orientation == "sagittal":
        renderer.set_camera(position=(-412.95, -34.38, 80.15),
                            focal_point=(102.46, -16.96, -11.71),
                            view_up=(0.1806, 0.0, 0.9835))
    elif orientation == "coronal":
        renderer.set_camera(position=(-48.63, 360.31, 98.37),
                            focal_point=(-20.16, 92.89, 36.02),
                            view_up=(-0.0047, -0.2275, 0.9737))
    elif orientation == "axial":
        pass
    else:
        raise ValueError("Invalid orientation provided")

    # Use this to interactively get a new camera angle
    # window.show(renderer, size=img_size, reset_camera=False)
    # print(renderer.get_camera())

    window.record(renderer, out_path=output_path, size=img_size)
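
# A hedged usage sketch; the file paths, bundle name, and metric array below
# are placeholders, not values from the original project:
plot_bundles_with_metric("CST_right.trk", "CST_right_endings.nii.gz",
                         "brain_mask.nii.gz", "CST_right",
                         metrics=np.random.rand(98),
                         output_path="CST_right_metric.png",
                         tracking_format="trk_legacy")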
Example #27
                       [68.90222168, 93.46326447, 122.01765442],
                       [68.99872589, 93.30039978, 122.84759521],
                       [69.04119873, 93.05428314, 123.66156769],
                       [69.05086517, 92.74394989, 124.45450592],
                       [69.02742004, 92.40427399, 125.23509979],
                       [68.95466614, 92.09059143, 126.02339935],
                       [68.84975433, 91.79674531, 126.81564331],
                       [68.72673798, 91.53726196, 127.61715698],
                       [68.60685731, 91.30300141, 128.42681885],
                       [68.50636292, 91.12481689, 129.25317383],
                       [68.39311218, 91.01572418, 130.08976746],
                       [68.25946808, 90.94654083, 130.92756653]],
                      dtype=np.float32)

streamlines = Streamlines([
    streamline[[0, 10]], streamline, streamline[::2], streamline[::3],
    streamline[::5], streamline[::6]
])


def io_tractogram(extension):
    with InTemporaryDirectory():
        fname = 'test.{}'.format(extension)

        in_affine = np.eye(4)
        in_dimensions = np.array([50, 50, 50])
        in_voxel_sizes = np.array([2, 1.5, 1.5])
        nii_header = create_nifti_header(in_affine, in_dimensions,
                                         in_voxel_sizes)
        sft = StatefulTractogram(streamlines, nii_header, space=Space.RASMM)

        save_tractogram(sft, fname, bbox_valid_check=False)
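
# A minimal round-trip sketch under the same assumptions; load_tractogram is
# the dipy.io.streamline counterpart of save_tractogram:
from dipy.io.streamline import load_tractogram

def check_roundtrip(fname):
    # 'same' keeps the tractogram in the space it was saved in
    sft = load_tractogram(fname, 'same', bbox_valid_check=False)
    return len(sft.streamlines)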
Example #28
def test_cluster_confidence():
    mysl = np.array([np.arange(10)] * 3, 'float').T

    # a short streamline (<20 mm) should raise an error unless override=True
    test_streamlines = Streamlines()
    test_streamlines.append(mysl)
    assert_raises(ValueError, cluster_confidence, test_streamlines)
    cci = cluster_confidence(test_streamlines, override=True)

    # two identical streamlines should raise an error
    test_streamlines = Streamlines()
    test_streamlines.append(mysl, cache_build=True)
    test_streamlines.append(mysl)
    test_streamlines.finalize_append()
    assert_raises(ValueError, cluster_confidence, test_streamlines)

    # 3 offset collinear streamlines
    test_streamlines = Streamlines()
    test_streamlines.append(mysl, cache_build=True)
    test_streamlines.append(mysl+1)
    test_streamlines.append(mysl+2)
    test_streamlines.finalize_append()
    cci = cluster_confidence(test_streamlines, override=True)
    assert_equal(cci[0], cci[2])
    assert_true(cci[1] > cci[0])

    # 3 parallel streamlines
    mysl = np.zeros([10, 3])
    mysl[:, 0] = np.arange(10)
    mysl2 = mysl.copy()
    mysl2[:, 1] = 1
    mysl3 = mysl.copy()
    mysl3[:, 1] = 2
    mysl4 = mysl.copy()
    mysl4[:, 1] = 4
    mysl5 = mysl.copy()
    mysl5[:, 1] = 5000

    test_streamlines_p1 = Streamlines()
    test_streamlines_p1.append(mysl, cache_build=True)
    test_streamlines_p1.append(mysl2)
    test_streamlines_p1.append(mysl3)
    test_streamlines_p1.finalize_append()
    test_streamlines_p2 = Streamlines()
    test_streamlines_p2.append(mysl, cache_build=True)
    test_streamlines_p2.append(mysl3)
    test_streamlines_p2.append(mysl4)
    test_streamlines_p2.finalize_append()
    test_streamlines_p3 = Streamlines()
    test_streamlines_p3.append(mysl, cache_build=True)
    test_streamlines_p3.append(mysl2)
    test_streamlines_p3.append(mysl3)
    test_streamlines_p3.append(mysl5)
    test_streamlines_p3.finalize_append()

    cci_p1 = cluster_confidence(test_streamlines_p1, override=True)
    cci_p2 = cluster_confidence(test_streamlines_p2, override=True)

    # test relative distance
    assert_array_equal(cci_p1, cci_p2*2)

    # test simple cci calculation
    expected_p1 = np.array([1./1+1./2, 1./1+1./1, 1./1+1./2])
    expected_p2 = np.array([1./2+1./4, 1./2+1./2, 1./2+1./4])
    assert_array_equal(expected_p1, cci_p1)
    assert_array_equal(expected_p2, cci_p2)

    # test power variable calculation (dropoff with distance)
    cci_p1_pow2 = cluster_confidence(test_streamlines_p1, power=2,
                                     override=True)
    expected_p1_pow2 = np.array([np.power(1./1, 2)+np.power(1./2, 2),
                                 np.power(1./1, 2)+np.power(1./1, 2),
                                 np.power(1./1, 2)+np.power(1./2, 2)])

    assert_array_equal(cci_p1_pow2, expected_p1_pow2)

    # test max distance (ignore distant sls)
    cci_dist = cluster_confidence(test_streamlines_p3,
                                  max_mdf=5, override=True)
    expected_cci_dist = np.concatenate([cci_p1, np.zeros(1)])
    assert_array_equal(cci_dist, expected_cci_dist)
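
# The expected values above follow the CCI definition: each streamline scores
# the sum over its neighbours of (1 / MDF distance) ** power. A toy standalone
# version for the parallel-lines case, where the MDF reduces to the y-offset
# difference (hypothetical helper, not part of dipy):
import numpy as np

def toy_cci(offsets, power=1):
    offsets = np.asarray(offsets, dtype=float)
    scores = []
    for i, oi in enumerate(offsets):
        dists = np.abs(np.delete(offsets, i) - oi)
        scores.append(np.sum((1.0 / dists) ** power))
    return np.array(scores)

# toy_cci([0, 1, 2]) -> [1.5, 2.0, 1.5], matching expected_p1 above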
Example #29
def evaluate_streamline_plausibility(dwi_data,
                                     gtab,
                                     mask_data,
                                     streamlines,
                                     affine=np.eye(4),
                                     sphere='repulsion724'):
    """
    Linear Fascicle Evaluation (LiFE) takes any connectome and uses a
    forward modelling approach to predict diffusion measurements in the
    same brain.

    Parameters
    ----------
    dwi_data : array
        4D array of dwi data.
    gtab : Obj
        DiPy object storing diffusion gradient information.
    mask_data : array
       3D Brain mask.
    streamlines : ArraySequence
        DiPy list/array-like object of streamline points from tractography.
    affine : array
        4x4 affine matrix (defaults to the identity).
    sphere : str
        Name of the DiPy sampling sphere (defaults to 'repulsion724').

    Returns
    -------
    streamlines : ArraySequence
        DiPy list/array-like object of streamline points from tractography.

    References
    ----------
    .. [1] Pestilli, F., Yeatman, J, Rokem, A. Kay, K. and Wandell B.A. (2014).
     Validation and statistical inference in living connectomes.
     Nature Methods 11: 1058-1063. doi:10.1038/nmeth.3098
    """
    import dipy.tracking.life as life
    import dipy.core.optimize as opt
    from dipy.tracking._utils import _mapping_to_voxel
    # from dipy.data import get_sphere
    from dipy.tracking import utils
    from dipy.tracking.streamline import Streamlines

    original_count = len(streamlines)

    streamlines_long = nib.streamlines.array_sequence.ArraySequence(
        [s for s in streamlines if len(s) >= 10])
    print('Removing streamlines with negative voxel indices...')
    # Remove any streamlines with negative voxel indices
    lin_T, offset = _mapping_to_voxel(np.eye(4))
    streamlines_positive = []
    for sl in streamlines_long:
        inds = np.dot(sl, lin_T)
        inds += offset
        if not inds.min().round(decimals=6) < 0:
            streamlines_positive.append(sl)
    del streamlines_long

    # Filter resulting streamlines by those that stay entirely
    # inside the ROI of interest
    mask_data = np.array(mask_data, dtype=bool, copy=False)
    streamlines_in_brain = Streamlines(
        utils.target(streamlines_positive, np.eye(4), mask_data, include=True))
    streamlines_in_brain = list(streamlines_in_brain)
    del streamlines_positive
    print('Fitting fiber model...')

    # ! Remember this 4d masking function !
    data_in_mask = np.nan_to_num(
        np.broadcast_to(mask_data[..., None], dwi_data.shape).astype('bool') *
        dwi_data)
    # ! Remember this 4d masking function !

    fiber_model = life.FiberModel(gtab)
    fiber_fit = fiber_model.fit(data_in_mask,
                                streamlines_in_brain,
                                affine=affine,
                                sphere=False)
    # sphere = get_sphere(sphere)
    # fiber_fit = fiber_model.fit(data_in_mask, streamlines_in_brain,
    #                             affine=affine,
    #                             sphere=sphere)
    streamlines = list(
        np.array(streamlines_in_brain)[np.where(fiber_fit.beta > 0)[0]])
    pruned_count = len(streamlines)
    if pruned_count == 0:
        print(
            UserWarning('\nWarning LiFE skipped due to implausible values '
                        'detected in model betas. This does not '
                        'necessarily invalidate the '
                        'tractography. Rather it could indicate that '
                        'you\'ve sampled too few streamlines, or that the '
                        'sampling scheme is simply incompatible with the '
                        'LiFE model. Is your acquisition hemispheric? '
                        'Also check the gradient table for errors. \n'))
        return streamlines_in_brain
    else:
        del streamlines_in_brain

    model_predict = fiber_fit.predict()
    model_error = model_predict - fiber_fit.data
    model_rmse = np.sqrt(np.mean(model_error[:, 10:]**2, -1))
    beta_baseline = np.zeros(fiber_fit.beta.shape[0])
    pred_weighted = np.reshape(
        opt.spdot(fiber_fit.life_matrix, beta_baseline),
        (fiber_fit.vox_coords.shape[0], np.sum(~gtab.b0s_mask)))
    mean_pred = np.empty((fiber_fit.vox_coords.shape[0], gtab.bvals.shape[0]))
    S0 = fiber_fit.b0_signal
    mean_pred[..., gtab.b0s_mask] = S0[:, None]
    mean_pred[...,
              ~gtab.b0s_mask] = (pred_weighted +
                                 fiber_fit.mean_signal[:, None]) * S0[:, None]
    mean_error = mean_pred - fiber_fit.data
    mean_rmse = np.sqrt(np.mean(mean_error**2, -1))
    print(f"Original # Streamlines: {original_count}")
    print(f"Final # Streamlines: {pruned_count}")
    print(f"Streamlines removed: {pruned_count - original_count}")
    print(f"Mean RMSE: {np.mean(mean_rmse)}")
    print(f"Mean Model RMSE: {np.mean(model_rmse)}")
    print(f"Mean Reduction RMSE: {np.mean(mean_rmse - model_rmse)}")
    return streamlines
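
# A hedged usage sketch with placeholder file names; `streamlines` is assumed
# to come from a prior tracking step:
import numpy as np
from dipy.core.gradients import gradient_table
from dipy.io.gradients import read_bvals_bvecs
from dipy.io.image import load_nifti

dwi_data, _ = load_nifti('dwi.nii.gz')
mask_data, _ = load_nifti('brain_mask.nii.gz')
bvals, bvecs = read_bvals_bvecs('dwi.bval', 'dwi.bvec')
gtab = gradient_table(bvals, bvecs)
pruned = evaluate_streamline_plausibility(dwi_data, gtab, mask_data,
                                          streamlines, affine=np.eye(4))
print(f"{len(pruned)} streamlines survived LiFE pruning")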
Example #30
    def localTracking(self):
        if self.graddev is None:

            # no gradient-deviation field provided: use the identity Jacobian
            graddev = np.eye(3)

            new_peak_dirsp = np.einsum('ab,ijkvb->aijkv', graddev,
                                       self.peaks.peak_dirs)
            shape = new_peak_dirsp.shape
            new_peak_dirsp = new_peak_dirsp.reshape(3, -1)
            new_peak_dirs = copy.deepcopy(new_peak_dirsp)
            for i in range(0, new_peak_dirs.shape[-1]):
                norm = np.linalg.norm(new_peak_dirsp[:, i])
                if norm != 0:
                    new_peak_dirs[:, i] = new_peak_dirsp[:, i] / norm
            new_peak_dirs = new_peak_dirs.reshape(shape)
            new_peak_dirs = np.moveaxis(new_peak_dirs, 0, -1)
            new_peak_dirs = new_peak_dirs.reshape(
                [-1, self.peaks.peak_indices.shape[-1], 3])
            #update self.peaks.peak_indices
            peak_indices = np.zeros(self.peaks.peak_indices.shape)
            peak_indices = peak_indices.reshape(
                [-1, self.peaks.peak_indices.shape[-1]])

            for i in range(0, peak_indices.shape[0]):
                for k in range(0, self.peaks.peak_indices.shape[-1]):
                    peak_indices[i, k] = self.sphere.find_closest(
                        new_peak_dirs[i, k, :])

            self.peaks.peak_indices = peak_indices.reshape(
                self.peaks.peak_indices.shape)

            streamlines_generator = LocalTracking(self.peaks,
                                                  self.stopping_criterion,
                                                  self.seeds,
                                                  self.affine,
                                                  step_size=abs(
                                                      self.affine[0, 0] / 6))
            self.streamlines = Streamlines(streamlines_generator)

        else:
            shape = self.graddev.shape
            self.graddev = self.graddev.reshape(shape[0:3] + (3, 3), order='F')
            #self.graddev[:, :, :, :, 2] = 0
            #self.graddev[:, :, :, 2, :] = 0
            #self.graddev[:, :, :, 2, 2] = -1

            self.graddev = (self.graddev.reshape([-1, 3, 3]) + np.eye(3))
            self.graddev = self.graddev.reshape(shape[0:3] + (3, 3))

            #multiply by the jacobian
            new_peak_dirsp = np.einsum('ijkab,ijkvb->aijkv', self.graddev,
                                       self.peaks.peak_dirs)
            shape = new_peak_dirsp.shape
            new_peak_dirsp = new_peak_dirsp.reshape(3, -1)
            new_peak_dirs = copy.deepcopy(new_peak_dirsp)
            for i in range(0, new_peak_dirs.shape[-1]):
                norm = np.linalg.norm(new_peak_dirsp[:, i])
                if norm != 0:
                    new_peak_dirs[:, i] = new_peak_dirsp[:, i] / norm
            new_peak_dirs = new_peak_dirs.reshape(shape)
            new_peak_dirs = np.moveaxis(new_peak_dirs, 0, -1)
            new_peak_dirs = new_peak_dirs.reshape(
                [-1, self.peaks.peak_indices.shape[-1], 3])
            #update self.peaks.peak_indices
            peak_indices = np.zeros(self.peaks.peak_indices.shape)
            peak_indices = peak_indices.reshape(
                [-1, self.peaks.peak_indices.shape[-1]])

            for i in range(0, peak_indices.shape[0]):
                for k in range(0, self.peaks.peak_indices.shape[-1]):
                    peak_indices[i, k] = self.sphere.find_closest(
                        new_peak_dirs[i, k, :])

            self.peaks.peak_indices = peak_indices.reshape(
                self.peaks.peak_indices.shape)

            streamlines_generator = LocalTracking(self.peaks,
                                                  self.stopping_criterion,
                                                  self.seeds,
                                                  self.affine,
                                                  step_size=self.affine[0, 0] /
                                                  6)

            self.streamlines = Streamlines(streamlines_generator)
            self.NpointsPerLine = pointsPerLine(self.streamlines)
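
    # Both branches above renormalize the transformed peak directions the
    # same way. A standalone sketch of that shared step (hypothetical helper,
    # not in the original class), assuming a (3, N) array of directions:
    @staticmethod
    def _normalize_directions(dirs):
        # Scale every nonzero column to unit length; zero vectors are kept.
        norms = np.linalg.norm(dirs, axis=0)
        out = dirs.copy()
        nonzero = norms != 0
        out[:, nonzero] = dirs[:, nonzero] / norms[nonzero]
        return out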
Example #31
# Make a corpus callosum seed mask for tracking
seed_mask = labels == 2
seeds = utils.seeds_from_mask(seed_mask, density=[1, 1, 1], affine=affine)
# Make a streamline bundle model of the corpus callosum ROI connectivity
streamlines = LocalTracking(csa_peaks, classifier, seeds, affine,
                            step_size=2)
streamlines = Streamlines(streamlines)


"""
We do not want our results inflated by short streamlines, so we remove
streamlines shorter than 40mm prior to calculating the CCI.
"""

lengths = list(length(streamlines))
long_streamlines = Streamlines()
for i, sl in enumerate(streamlines):
    if lengths[i] > 40:
        long_streamlines.append(sl)


"""
Now we calculate the Cluster Confidence Index using the corpus callosum
streamline bundle and visualize them.
"""


cci = cluster_confidence(long_streamlines)

# Visualize the streamlines, colored by cci
ren = window.Renderer()
Example #32
s_list = []
vec_vols = []
for i in range(len(id)):
    for j in range(i + 1):
        edge_s_list = []
        if (i + 1, j + 1) in streamline_dict and mat_medians[i, j] > 0:
            edge_s_list += streamline_dict[(i + 1, j + 1)]
        if (j + 1, i + 1) in streamline_dict and mat_medians[i, j] > 0:
            edge_s_list += streamline_dict[(j + 1, i + 1)]
        edge_vec_vols = [mat_medians[i, j]] * len(edge_s_list)

        s_list = s_list + edge_s_list
        vec_vols = vec_vols + edge_vec_vols

s = Streamlines(s_list)

cci = cluster_confidence(s)

keep_streamlines = Streamlines()
for i, sl in enumerate(s):
    if cci[i] >= 1:
        keep_streamlines.append(sl)

# Visualize the streamlines we kept
ren = window.Renderer()

keep_streamlines_actor = actor.line(keep_streamlines, linewidth=0.1)

ren.add(keep_streamlines_actor)