Code example #1
def select_by_vol_rois(streamlines, rois, include, mode=None, affine=None, tol=None):
    """
    Include or exclude the streamlines according to some ROIs
    example
    >>>selection = select_by_vol_rois(streamlines, [mask1, mask2], [True, False], mode="both_end", tol=1.0)
    >>>selection = list(selection)
    """
    rois_selection = streamline.select_by_rois(streamline=streamlines, rois=rois,
                                               include=include, mode=mode, affine=affine, tol=tol)
    rois_streamlines = list(rois_selection)

    return rois_streamlines
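A minimal way to exercise this wrapper on synthetic data (the streamline coordinates, mask shapes, and tolerance below are illustrative assumptions, not values from the original project):

import numpy as np

# Two toy streamlines in voxel coordinates.
streamlines = [np.array([[0., 0., 0.], [1., 1., 1.]]),
               np.array([[3., 3., 3.], [3., 3., 2.]])]

# Toy ROIs: keep streamlines near voxel (0, 0, 0), drop those near (3, 3, 3).
mask1 = np.zeros((4, 4, 4), dtype=bool)
mask2 = np.zeros_like(mask1)
mask1[0, 0, 0] = True
mask2[3, 3, 3] = True

kept = select_by_vol_rois(streamlines, [mask1, mask2], [True, False],
                          mode="any", affine=np.eye(4), tol=1.0)
print(len(kept))  # expected: 1 (only the first streamline survives)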
Code example #2
File: streamlines.py  Project: alexpron/sctva
def select_streamlines_between_peaks_from_spheres(streamlines,
                                                  peaks_rois,
                                                  affine,
                                                  index_peak1,
                                                  index_peak2,
                                                  radius=5):
    from dipy.tracking.streamline import select_by_rois
    import numpy as np
    roi1 = peaks_rois[index_peak1]
    roi2 = peaks_rois[index_peak2]
    roi = roi1 + roi2
    roi = np.expand_dims(roi, axis=0)
    selected_tracks = select_by_rois(streamlines,
                                     affine=affine,
                                     rois=roi,
                                     mode='both_end',
                                     include=np.array([True]),
                                     tol=radius)
    return selected_tracks
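As with the previous example, a small synthetic call illustrates the intent; the peak masks, grid size, and radius below are assumptions for illustration, not values from the sctva project:

import numpy as np

# Two "peak" masks marking single voxels of a toy 10x10x10 grid.
peak_a = np.zeros((10, 10, 10), dtype=np.uint8)
peak_b = np.zeros_like(peak_a)
peak_a[2, 2, 2] = 1
peak_b[7, 7, 7] = 1
peaks_rois = [peak_a, peak_b]

# One streamline whose endpoints lie on the two peaks, and one that does not.
streamlines = [np.array([[2., 2., 2.], [4., 4., 4.], [7., 7., 7.]]),
               np.array([[0., 0., 0.], [1., 0., 0.]])]

selected = select_streamlines_between_peaks_from_spheres(
    streamlines, peaks_rois, affine=np.eye(4),
    index_peak1=0, index_peak2=1, radius=1)
print(len(list(selected)))  # expected: 1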
Code example #3
File: test_streamline.py  Project: sahmed95/dipy
def test_select_by_rois():
    streamlines = [np.array([[0, 0., 0.9],
                             [1.9, 0., 0.]]),
                   np.array([[0.1, 0., 0],
                             [0, 1., 1.],
                             [0, 2., 2.]]),
                   np.array([[2, 2, 2],
                             [3, 3, 3]])]

    # Make two ROIs:
    mask1 = np.zeros((4, 4, 4), dtype=bool)
    mask2 = np.zeros_like(mask1)
    mask1[0, 0, 0] = True
    mask2[1, 0, 0] = True

    selection = select_by_rois(streamlines, [mask1], [True],
                               tol=1)

    npt.assert_array_equal(list(selection), [streamlines[0],
                           streamlines[1]])

    selection = select_by_rois(streamlines, [mask1, mask2], [True, True],
                               tol=1)

    npt.assert_array_equal(list(selection), [streamlines[0],
                           streamlines[1]])

    selection = select_by_rois(streamlines, [mask1, mask2], [True, False])

    npt.assert_array_equal(list(selection), [streamlines[1]])

    # Setting tolerance too low gets overridden:
    selection = select_by_rois(streamlines, [mask1, mask2], [True, False],
                               tol=0.1)
    npt.assert_array_equal(list(selection), [streamlines[1]])

    selection = select_by_rois(streamlines, [mask1, mask2], [True, True],
                               tol=0.87)

    npt.assert_array_equal(list(selection), [streamlines[1]])

    mask3 = np.zeros_like(mask1)
    mask3[0, 2, 2] = 1
    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False], tol=1.0)

    npt.assert_array_equal(list(selection), [streamlines[0]])

    # Select using only one ROI
    selection = select_by_rois(streamlines, [mask1], [True], tol=0.87)
    npt.assert_array_equal(list(selection), [streamlines[1]])

    selection = select_by_rois(streamlines, [mask1], [True], tol=1.0)
    npt.assert_array_equal(list(selection), [streamlines[0],
                           streamlines[1]])

    # Use different modes:
    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False],
                               mode="all",
                               tol=1.0)
    npt.assert_array_equal(list(selection), [streamlines[0]])

    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False],
                               mode="either_end",
                               tol=1.0)
    npt.assert_array_equal(list(selection), [streamlines[0]])

    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False],
                               mode="both_end",
                               tol=1.0)
    npt.assert_array_equal(list(selection), [streamlines[0]])

    mask2[0, 2, 2] = True
    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False],
                               mode="both_end",
                               tol=1.0)

    npt.assert_array_equal(list(selection), [streamlines[0],
                                             streamlines[1]])

    # Test with generator input:
    selection = select_by_rois(generate_sl(streamlines), [mask1], [True],
                               tol=1.0)
    npt.assert_array_equal(list(selection), [streamlines[0],
                           streamlines[1]])
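The final assertion relies on a generate_sl helper that is not included in this excerpt; in the dipy test module it is essentially a trivial wrapper that re-yields the input, roughly:

def generate_sl(streamlines):
    # Yield streamlines one at a time so the test can exercise generator input.
    for sl in streamlines:
        yield sl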
Code example #4
File: test_streamline.py  Project: okjoh/dipy
def test_select_by_rois():
    streamlines = [
        np.array([[0, 0., 0.9], [1.9, 0., 0.]]),
        np.array([[0.1, 0., 0], [0, 1., 1.], [0, 2., 2.]]),
        np.array([[2, 2, 2], [3, 3, 3]])
    ]

    # Make two ROIs:
    mask1 = np.zeros((4, 4, 4), dtype=bool)
    mask2 = np.zeros_like(mask1)
    mask1[0, 0, 0] = True
    mask2[1, 0, 0] = True

    selection = select_by_rois(streamlines, [mask1], [True], tol=1)

    assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]])

    selection = select_by_rois(streamlines, [mask1, mask2], [True, True],
                               tol=1)

    assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]])

    selection = select_by_rois(streamlines, [mask1, mask2], [True, False])

    assert_arrays_equal(list(selection), [streamlines[1]])

    # Setting tolerance too low gets overridden:
    selection = select_by_rois(streamlines, [mask1, mask2], [True, False],
                               tol=0.1)
    assert_arrays_equal(list(selection), [streamlines[1]])

    selection = select_by_rois(streamlines, [mask1, mask2], [True, True],
                               tol=0.87)

    assert_arrays_equal(list(selection), [streamlines[1]])

    mask3 = np.zeros_like(mask1)
    mask3[0, 2, 2] = 1
    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False],
                               tol=1.0)

    assert_arrays_equal(list(selection), [streamlines[0]])

    # Select using only one ROI
    selection = select_by_rois(streamlines, [mask1], [True], tol=0.87)
    assert_arrays_equal(list(selection), [streamlines[1]])

    selection = select_by_rois(streamlines, [mask1], [True], tol=1.0)
    assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]])

    # Use different modes:
    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False],
                               mode="all",
                               tol=1.0)
    assert_arrays_equal(list(selection), [streamlines[0]])

    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False],
                               mode="either_end",
                               tol=1.0)
    assert_arrays_equal(list(selection), [streamlines[0]])

    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False],
                               mode="both_end",
                               tol=1.0)
    assert_arrays_equal(list(selection), [streamlines[0]])

    mask2[0, 2, 2] = True
    selection = select_by_rois(streamlines, [mask1, mask2, mask3],
                               [True, True, False],
                               mode="both_end",
                               tol=1.0)

    assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]])

    # Test with generator input:
    selection = select_by_rois(generate_sl(streamlines), [mask1], [True],
                               tol=1.0)
    assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]])
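The tol=0.87 versus tol=1.0 cases in both tests line up with the default tolerance described in the docstrings further down this page: it defaults to the distance from a voxel's center to its corner, which for the 1 mm isotropic grid implied by an identity affine is sqrt(3)/2, about 0.866 mm. A quick check:

import numpy as np

# Half the voxel diagonal = distance from a unit voxel's center to its corner.
voxel_size = np.array([1.0, 1.0, 1.0])
default_tol = np.linalg.norm(voxel_size) / 2.0
print(round(default_tol, 3))  # 0.866 -> tol=0.87 barely clears it; tol=0.1 is overridden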
Code example #5
File: track.py  Project: dPys/PyNets
def run_tracking(step_curv_combinations,
                 recon_shelved,
                 n_seeds_per_iter,
                 traversal,
                 maxcrossing,
                 max_length,
                 pft_back_tracking_dist,
                 pft_front_tracking_dist,
                 particle_count,
                 roi_neighborhood_tol,
                 min_length,
                 track_type,
                 min_separation_angle,
                 sphere,
                 tiss_class,
                 tissue_shelved,
                 verbose=False):
    """
    Run tractography for a single step size and curvature combination and return the resulting streamlines.

    Parameters
    ----------
    step_curv_combinations : list
        List of tuples representing all pair combinations of step sizes and
        curvature thresholds from which to sample streamlines.
    recon_shelved : obj
        Shelved (joblib) diffusion reconstruction model, accessed via .get().
    n_seeds_per_iter : int
        Number of seeds from which to initiate tracking for each unique
        ensemble combination. By default this is set to 250.
    traversal : str
        The statistical approach to tracking. Options are: det (deterministic),
        cp (closest peaks), and prob (probabilistic).
    maxcrossing : int
        Maximum number of diffusion directions that can be assumed per voxel
        while tracking.
    max_length : int
        Maximum number of steps to restrict tracking.
    pft_back_tracking_dist : float
        Distance in mm to back track before starting the particle filtering
        tractography. The total particle filtering tractography distance is
        equal to back_tracking_dist + front_tracking_dist. By default this is
        set to 2 mm.
    pft_front_tracking_dist : float
        Distance in mm to run the particle filtering tractography after the
        back track distance. The total particle filtering tractography
        distance is equal to back_tracking_dist + front_tracking_dist. By
        default this is set to 1 mm.
    particle_count : int
        Number of particles to use in the particle filter.
    roi_neighborhood_tol : float
        Distance (in the units of the streamlines, usually mm). If any
        coordinate in the streamline is within this distance from the center
        of any voxel in the ROI, the filtering criterion is set to True for
        this streamline, otherwise False. Defaults to the distance between
        the center of each voxel and the corner of the voxel.
    min_length : int
        Minimum fiber length threshold in mm to restrict tracking.
    track_type : str
        Tracking algorithm used (e.g. 'local' or 'particle').
    min_separation_angle : float
        The minimum angle between directions [0, 90].
    sphere : obj
        DiPy object for modeling diffusion directions on a sphere.
    tiss_class : str
        Tissue classification method.
    tissue_shelved : obj
        Shelved (joblib) 4D Nifti1Image of T1w tissue segmentations in native
        diffusion space, accessed via .get().

    Returns
    -------
    streamlines : ArraySequence
        DiPy list/array-like object of streamline points from tractography.
    """
    import gc
    import time
    import numpy as np
    from dipy.tracking import utils
    from dipy.tracking.streamline import select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, \
        ParticleFilteringTracking
    from dipy.direction import (ProbabilisticDirectionGetter,
                                ClosestPeakDirectionGetter,
                                DeterministicMaximumDirectionGetter)
    from nilearn.image import index_img, math_img
    from pynets.dmri.utils import generate_seeds, random_seeds_from_mask
    from nibabel.streamlines.array_sequence import ArraySequence

    start_time = time.time()

    if verbose is True:
        print("%s%s%s" % ('Preparing tissue constraints:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()

    tissue_img = tissue_shelved.get()

    # Order:
    B0_mask = index_img(tissue_img, 0)
    atlas_img = index_img(tissue_img, 1)
    t1w2dwi = index_img(tissue_img, 3)
    gm_in_dwi = index_img(tissue_img, 4)
    vent_csf_in_dwi = index_img(tissue_img, 5)
    wm_in_dwi = index_img(tissue_img, 6)
    tissue_img.uncache()

    tiss_classifier = prep_tissues(t1w2dwi, gm_in_dwi, vent_csf_in_dwi,
                                   wm_in_dwi, tiss_class, B0_mask)

    # if verbose is True:
    #     print("%s%s%s" % (
    #     'Fitting tissue classifier:',
    #     np.round(time.time() - start_time, 1), 's'))
    #     start_time = time.time()

    if verbose is True:
        print("%s%s%s" % ('Loading reconstruction:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()

        print("%s%s" % ("Curvature: ", step_curv_combinations[1]))

    # Instantiate DirectionGetter
    if traversal.lower() in ["probabilistic", "prob"]:
        dg = ProbabilisticDirectionGetter.from_shcoeff(
            recon_shelved.get(),
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif traversal.lower() in ["closestpeaks", "cp"]:
        dg = ClosestPeakDirectionGetter.from_shcoeff(
            recon_shelved.get(),
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif traversal.lower() in ["deterministic", "det"]:
        maxcrossing = 1
        dg = DeterministicMaximumDirectionGetter.from_shcoeff(
            recon_shelved.get(),
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    else:
        raise ValueError("ERROR: No valid direction getter(s) specified.")

    if verbose is True:
        print("%s%s%s" % ('Extracting directions:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()
        print("%s%s" % ("Step: ", step_curv_combinations[0]))

    # Perform wm-gm interface seeding, using n_seeds at a time
    seeds = generate_seeds(
        random_seeds_from_mask(np.asarray(
            math_img("img > 0.01", img=index_img(
                tissue_img, 2)).dataobj).astype("bool").astype("int16") > 0,
                               seeds_count=n_seeds_per_iter,
                               random_seed=42))

    if verbose is True:
        print("%s%s%s" % ('Drawing random seeds:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()
        # print(seeds)

    # Perform tracking
    if track_type == "local":
        streamline_generator = LocalTracking(dg,
                                             tiss_classifier,
                                             np.stack([i for i in seeds]),
                                             np.eye(4),
                                             max_cross=int(maxcrossing),
                                             maxlen=int(max_length),
                                             step_size=float(
                                                 step_curv_combinations[0]),
                                             fixedstep=False,
                                             return_all=True,
                                             random_seed=42)
    elif track_type == "particle":
        streamline_generator = ParticleFilteringTracking(
            dg,
            tiss_classifier,
            np.stack([i for i in seeds]),
            np.eye(4),
            max_cross=int(maxcrossing),
            step_size=float(step_curv_combinations[0]),
            maxlen=int(max_length),
            pft_back_tracking_dist=pft_back_tracking_dist,
            pft_front_tracking_dist=pft_front_tracking_dist,
            pft_max_trial=20,
            particle_count=particle_count,
            return_all=True,
            random_seed=42)
    else:
        raise ValueError("ERROR: No valid tracking method(s) specified.")

    if verbose is True:
        print("%s%s%s" % ('Instantiating tracking:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()
        # print(seeds)

    del dg

    # Filter resulting streamlines by those that stay entirely
    # inside the brain
    try:
        roi_proximal_streamlines = utils.target(
            streamline_generator,
            np.eye(4),
            np.asarray(B0_mask.dataobj).astype('bool'),
            include=True)
    except BaseException:
        print('No streamlines found inside the brain! ' 'Check registrations.')
        return None

    if verbose is True:
        print("%s%s%s" % ('Drawing streamlines:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()

    del seeds, tiss_classifier, streamline_generator

    B0_mask.uncache()
    atlas_img.uncache()
    t1w2dwi.uncache()
    gm_in_dwi.uncache()
    vent_csf_in_dwi.uncache()
    wm_in_dwi.uncache()
    gc.collect()

    # Filter resulting streamlines by roi-intersection
    # characteristics
    atlas_data = np.array(atlas_img.dataobj).astype("uint16")

    # Build mask vector from atlas for later roi filtering
    parcels = [
        atlas_data == roi_val
        for roi_val in [i for i in np.unique(atlas_data) if i != 0]
    ]

    try:
        roi_proximal_streamlines = \
                select_by_rois(
                    roi_proximal_streamlines,
                    affine=np.eye(4),
                    rois=parcels,
                    include=list(np.ones(len(parcels)).astype("bool")),
                    mode="any",
                    tol=roi_neighborhood_tol,
                )
    except BaseException:
        print('No streamlines found to connect any parcels! '
              'Check registrations.')
        return None

    del atlas_data

    if verbose is True:
        print("%s%s%s" % ('Selecting by parcellation:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()

    del parcels

    gc.collect()

    if verbose is True:
        print("%s%s%s" % ('Selecting by minimum length criterion:',
                          np.round(time.time() - start_time, 1), 's'))

    gc.collect()

    return ArraySequence([
        s.astype("float32") for s in roi_proximal_streamlines
        if len(s) > float(min_length)
    ])
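The parcel-based filtering step above can be reproduced in isolation; the atlas labels, streamlines, and tolerance below are synthetic stand-ins, not PyNets data:

import numpy as np
from dipy.tracking.streamline import select_by_rois

# A toy "atlas" with two labels, plus one streamline touching both parcels
# and one touching neither.
atlas_data = np.zeros((5, 5, 5), dtype=np.uint16)
atlas_data[0, 0, 0] = 1
atlas_data[2, 2, 2] = 2
streamlines = [np.array([[0., 0., 0.], [1., 1., 1.], [2., 2., 2.]]),
               np.array([[4., 4., 4.], [4., 4., 3.]])]

# Same construction as above: one boolean mask per nonzero atlas label.
parcels = [atlas_data == roi_val for roi_val in np.unique(atlas_data) if roi_val != 0]

kept = list(select_by_rois(streamlines, affine=np.eye(4), rois=parcels,
                           include=list(np.ones(len(parcels)).astype(bool)),
                           mode="any", tol=1.0))
print(len(kept))  # expected: 1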
Code example #6
File: track.py  Project: landmachine/PyNets
def run_tracking(step_curv_combinations,
                 recon_path,
                 n_seeds_per_iter,
                 directget,
                 maxcrossing,
                 max_length,
                 pft_back_tracking_dist,
                 pft_front_tracking_dist,
                 particle_count,
                 roi_neighborhood_tol,
                 waymask,
                 min_length,
                 track_type,
                 min_separation_angle,
                 sphere,
                 tiss_class,
                 tissues4d,
                 cache_dir,
                 min_seeds=100):

    import gc
    import os
    import h5py
    from dipy.tracking import utils
    from dipy.tracking.streamline import select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, \
        ParticleFilteringTracking
    from dipy.direction import (ProbabilisticDirectionGetter,
                                ClosestPeakDirectionGetter,
                                DeterministicMaximumDirectionGetter)
    from nilearn.image import index_img
    from pynets.dmri.track import prep_tissues
    from nibabel.streamlines.array_sequence import ArraySequence
    from nipype.utils.filemanip import copyfile, fname_presuffix
    import uuid
    from time import strftime

    run_uuid = f"{strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4()}"

    recon_path_tmp_path = fname_presuffix(
        recon_path,
        suffix=f"_{'_'.join([str(i) for i in step_curv_combinations])}_"
        f"{run_uuid}",
        newpath=cache_dir)
    copyfile(recon_path, recon_path_tmp_path, copy=True, use_hardlink=False)

    tissues4d_tmp_path = fname_presuffix(
        tissues4d,
        suffix=f"_{'_'.join([str(i) for i in step_curv_combinations])}_"
        f"{run_uuid}",
        newpath=cache_dir)
    copyfile(tissues4d, tissues4d_tmp_path, copy=True, use_hardlink=False)

    if waymask is not None:
        waymask_tmp_path = fname_presuffix(
            waymask,
            suffix=f"_{'_'.join([str(i) for i in step_curv_combinations])}_"
            f"{run_uuid}",
            newpath=cache_dir)
        copyfile(waymask, waymask_tmp_path, copy=True, use_hardlink=False)
    else:
        waymask_tmp_path = None

    tissue_img = nib.load(tissues4d_tmp_path)

    # Order:
    B0_mask = index_img(tissue_img, 0)
    atlas_img = index_img(tissue_img, 1)
    seeding_mask = index_img(tissue_img, 2)
    t1w2dwi = index_img(tissue_img, 3)
    gm_in_dwi = index_img(tissue_img, 4)
    vent_csf_in_dwi = index_img(tissue_img, 5)
    wm_in_dwi = index_img(tissue_img, 6)

    tiss_classifier = prep_tissues(t1w2dwi, gm_in_dwi, vent_csf_in_dwi,
                                   wm_in_dwi, tiss_class, B0_mask)

    B0_mask_data = np.asarray(B0_mask.dataobj).astype("bool")

    seeding_mask = np.asarray(
        seeding_mask.dataobj).astype("bool").astype("int16")

    with h5py.File(recon_path_tmp_path, 'r+') as hf:
        mod_fit = hf['reconstruction'][:].astype('float32')

    print("%s%s" % ("Curvature: ", step_curv_combinations[1]))

    # Instantiate DirectionGetter
    if directget.lower() in ["probabilistic", "prob"]:
        dg = ProbabilisticDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif directget.lower() in ["closestpeaks", "cp"]:
        dg = ClosestPeakDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif directget.lower() in ["deterministic", "det"]:
        maxcrossing = 1
        dg = DeterministicMaximumDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    else:
        raise ValueError("ERROR: No valid direction getter(s) specified.")

    print("%s%s" % ("Step: ", step_curv_combinations[0]))

    # Perform wm-gm interface seeding, using n_seeds at a time
    seeds = utils.random_seeds_from_mask(
        seeding_mask > 0,
        seeds_count=n_seeds_per_iter,
        seed_count_per_voxel=False,
        affine=np.eye(4),
    )
    if len(seeds) < min_seeds:
        print(
            UserWarning(
                f"<{min_seeds} valid seed points found in wm-gm interface..."))
        return None

    # print(seeds)

    # Perform tracking
    if track_type == "local":
        streamline_generator = LocalTracking(dg,
                                             tiss_classifier,
                                             seeds,
                                             np.eye(4),
                                             max_cross=int(maxcrossing),
                                             maxlen=int(max_length),
                                             step_size=float(
                                                 step_curv_combinations[0]),
                                             fixedstep=False,
                                             return_all=True,
                                             random_seed=42)
    elif track_type == "particle":
        streamline_generator = ParticleFilteringTracking(
            dg,
            tiss_classifier,
            seeds,
            np.eye(4),
            max_cross=int(maxcrossing),
            step_size=float(step_curv_combinations[0]),
            maxlen=int(max_length),
            pft_back_tracking_dist=pft_back_tracking_dist,
            pft_front_tracking_dist=pft_front_tracking_dist,
            pft_max_trial=20,
            particle_count=particle_count,
            return_all=True,
            random_seed=42)
    else:
        raise ValueError("ERROR: No valid tracking method(s) specified.")

    # Filter resulting streamlines by those that stay entirely
    # inside the brain
    try:
        roi_proximal_streamlines = utils.target(streamline_generator,
                                                np.eye(4),
                                                B0_mask_data.astype('bool'),
                                                include=True)
    except BaseException:
        print('No streamlines found inside the brain! ' 'Check registrations.')
        return None

    del mod_fit, seeds, tiss_classifier, streamline_generator, \
        B0_mask_data, seeding_mask, dg

    B0_mask.uncache()
    atlas_img.uncache()
    t1w2dwi.uncache()
    gm_in_dwi.uncache()
    vent_csf_in_dwi.uncache()
    wm_in_dwi.uncache()
    atlas_img.uncache()
    tissue_img.uncache()
    gc.collect()

    # Filter resulting streamlines by roi-intersection
    # characteristics
    atlas_data = np.array(atlas_img.dataobj).astype("uint16")

    # Build mask vector from atlas for later roi filtering
    intensities = [i for i in np.unique(atlas_data) if i != 0]
    parcels = [atlas_data == roi_val for roi_val in intensities]

    parcel_vec = list(np.ones(len(parcels)).astype("bool"))

    try:
        roi_proximal_streamlines = \
            nib.streamlines.array_sequence.ArraySequence(
                select_by_rois(
                    roi_proximal_streamlines,
                    affine=np.eye(4),
                    rois=parcels,
                    include=parcel_vec,
                    mode="any",
                    tol=roi_neighborhood_tol,
                )
            )
        print("%s%s" % ("Filtering by: \nNode intersection: ",
                        len(roi_proximal_streamlines)))
    except BaseException:
        print('No streamlines found to connect any parcels! '
              'Check registrations.')
        return None

    try:
        roi_proximal_streamlines = nib.streamlines. \
            array_sequence.ArraySequence(
                [
                    s for s in roi_proximal_streamlines
                    if len(s) >= float(min_length)
                ]
            )
        print(f"Minimum fiber length >{min_length}mm: "
              f"{len(roi_proximal_streamlines)}")
    except BaseException:
        print('No streamlines remaining after minimal length criterion.')
        return None

    if waymask is not None and os.path.isfile(waymask_tmp_path):
        waymask_data = np.asarray(
            nib.load(waymask_tmp_path).dataobj).astype("bool")
        try:
            roi_proximal_streamlines = roi_proximal_streamlines[utils.near_roi(
                roi_proximal_streamlines,
                np.eye(4),
                waymask_data,
                tol=int(round(roi_neighborhood_tol * 0.50, 1)),
                mode="all")]
            print("%s%s" %
                  ("Waymask proximity: ", len(roi_proximal_streamlines)))
            del waymask_data
        except BaseException:
            print('No streamlines remaining in waymask\'s vicinity.')
            return None

    hf.close()
    del parcels, atlas_data

    tmp_files = [tissues4d_tmp_path, waymask_tmp_path, recon_path_tmp_path]
    for j in tmp_files:
        if j is not None:
            if os.path.isfile(j):
                os.system(f"rm -f {j} &")

    if len(roi_proximal_streamlines) > 0:
        return ArraySequence(
            [s.astype("float32") for s in roi_proximal_streamlines])
    else:
        return None
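Note that both PyNets variants above threshold on len(s), i.e. the number of points in a streamline, which matches a millimetre criterion only when the step size is 1 mm. If a true geometric length criterion is wanted, dipy's length helper could be used instead; the snippet below is a minimal sketch of that alternative, not part of the PyNets code above:

import numpy as np
from dipy.tracking.streamline import length

def filter_by_geometric_length(streamlines, min_length_mm):
    # Keep only streamlines whose Euclidean length (in mm) meets the threshold.
    return [s for s in streamlines if length(s) >= float(min_length_mm)]

# Toy check: a two-point, 10 mm segment passes a 5 mm threshold but not 20 mm.
toy = [np.array([[0., 0., 0.], [10., 0., 0.]])]
print(len(filter_by_geometric_length(toy, 5)), len(filter_by_geometric_length(toy, 20)))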
Code example #7
File: track.py  Project: devhliu/PyNets
def track_ensemble(dwi_data,
                   target_samples,
                   atlas_data_wm_gm_int,
                   parcels,
                   mod_fit,
                   tiss_classifier,
                   sphere,
                   directget,
                   curv_thr_list,
                   step_list,
                   track_type,
                   maxcrossing,
                   max_length,
                   roi_neighborhood_tol,
                   min_length,
                   waymask,
                   n_seeds_per_iter=100,
                   pft_back_tracking_dist=2,
                   pft_front_tracking_dist=1,
                   particle_count=15):
    """
    Perform native-space ensemble tractography, restricted to a vector of ROI masks.

    dwi_data : array
        4D array of dwi data.
    target_samples : int
        Total number of streamline samples specified to generate streams.
    atlas_data_wm_gm_int : array
        3D int32 numpy array of atlas parcellation intensities from Nifti1Image in T1w-warped native diffusion space,
        restricted to wm-gm interface.
    parcels : list
        List of 3D boolean numpy arrays of atlas parcellation ROI masks from a Nifti1Image in T1w-warped native
        diffusion space.
    mod_fit : obj
        Connectivity reconstruction model.
    tiss_classifier : str
        Tissue classification method.
    sphere : obj
        DiPy object for modeling diffusion directions on a sphere.
    directget : str
        The statistical approach to tracking. Options are: det (deterministic), closest (clos), boot (bootstrapped),
        and prob (probabilistic).
    curv_thr_list : list
        List of integer curvature thresholds used to perform ensemble tracking.
    step_list : list
        List of float step-sizes used to perform ensemble tracking.
    track_type : str
        Tracking algorithm used (e.g. 'local' or 'particle').
    maxcrossing : int
        Maximum number of diffusion directions that can be assumed per voxel while tracking.
    max_length : int
        Maximum fiber length threshold in mm to restrict tracking.
    roi_neighborhood_tol : float
        Distance (in the units of the streamlines, usually mm). If any
        coordinate in the streamline is within this distance from the center
        of any voxel in the ROI, the filtering criterion is set to True for
        this streamline, otherwise False. Defaults to the distance between
        the center of each voxel and the corner of the voxel.
    min_length : int
        Minimum fiber length threshold in mm.
    waymask : str
        Path to a Nifti1Image in native diffusion space to constrain tractography.
    n_seeds_per_iter : int
        Number of seeds from which to initiate tracking for each unique ensemble combination.
        By default this is set to 100.
    pft_back_tracking_dist : float
        Distance in mm to back track before starting the particle filtering
        tractography. The total particle filtering tractography distance is
        equal to back_tracking_dist + front_tracking_dist. By default this is set to 2 mm.
    pft_front_tracking_dist : float
        Distance in mm to run the particle filtering tractography after the
        back track distance. The total particle filtering tractography
        distance is equal to back_tracking_dist + front_tracking_dist. By
        default this is set to 1 mm.
    particle_count : int
        Number of particles to use in the particle filter.

    Returns
    -------
    streamlines : ArraySequence
        DiPy list/array-like object of streamline points from tractography.
    """
    from colorama import Fore, Style
    from dipy.tracking import utils
    from dipy.tracking.streamline import Streamlines, select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, ParticleFilteringTracking
    from dipy.direction import ProbabilisticDirectionGetter, BootDirectionGetter, ClosestPeakDirectionGetter, DeterministicMaximumDirectionGetter

    if waymask:
        waymask_data = nib.load(waymask).get_fdata().astype('bool')

    # Commence Ensemble Tractography
    parcel_vec = list(np.ones(len(parcels)).astype('bool'))
    streamlines = nib.streamlines.array_sequence.ArraySequence()
    ix = 0
    circuit_ix = 0
    stream_counter = 0
    while int(stream_counter) < int(target_samples):
        for curv_thr in curv_thr_list:
            print("%s%s" % ('Curvature: ', curv_thr))

            # Instantiate DirectionGetter
            if directget == 'prob':
                dg = ProbabilisticDirectionGetter.from_shcoeff(
                    mod_fit, max_angle=float(curv_thr), sphere=sphere)
            elif directget == 'boot':
                dg = BootDirectionGetter.from_data(dwi_data,
                                                   mod_fit,
                                                   max_angle=float(curv_thr),
                                                   sphere=sphere)
            elif directget == 'clos':
                dg = ClosestPeakDirectionGetter.from_shcoeff(
                    mod_fit, max_angle=float(curv_thr), sphere=sphere)
            elif directget == 'det':
                dg = DeterministicMaximumDirectionGetter.from_shcoeff(
                    mod_fit, max_angle=float(curv_thr), sphere=sphere)
            else:
                raise ValueError(
                    'ERROR: No valid direction getter(s) specified.')

            for step in step_list:
                print("%s%s" % ('Step: ', step))

                # Perform wm-gm interface seeding, using n_seeds at a time
                seeds = utils.random_seeds_from_mask(
                    atlas_data_wm_gm_int > 0,
                    seeds_count=n_seeds_per_iter,
                    seed_count_per_voxel=False,
                    affine=np.eye(4))
                if len(seeds) == 0:
                    raise RuntimeWarning(
                        'Warning: No valid seed points found in wm-gm interface...'
                    )

                print(seeds)

                # Perform tracking
                if track_type == 'local':
                    streamline_generator = LocalTracking(
                        dg,
                        tiss_classifier,
                        seeds,
                        np.eye(4),
                        max_cross=int(maxcrossing),
                        maxlen=int(max_length),
                        step_size=float(step),
                        return_all=True)
                elif track_type == 'particle':
                    streamline_generator = ParticleFilteringTracking(
                        dg,
                        tiss_classifier,
                        seeds,
                        np.eye(4),
                        max_cross=int(maxcrossing),
                        step_size=float(step),
                        maxlen=int(max_length),
                        pft_back_tracking_dist=pft_back_tracking_dist,
                        pft_front_tracking_dist=pft_front_tracking_dist,
                        particle_count=particle_count,
                        return_all=True)
                else:
                    raise ValueError(
                        'ERROR: No valid tracking method(s) specified.')

                # Filter resulting streamlines by roi-intersection characteristics
                roi_proximal_streamlines = Streamlines(
                    select_by_rois(streamline_generator,
                                   affine=np.eye(4),
                                   rois=parcels,
                                   include=parcel_vec,
                                   mode='any',
                                   tol=roi_neighborhood_tol))

                print("%s%s" %
                      ('Qualifying Streamlines by node intersection: ',
                       len(roi_proximal_streamlines)))

                roi_proximal_streamlines = nib.streamlines.array_sequence.ArraySequence(
                    [
                        s for s in roi_proximal_streamlines
                        if len(s) > float(min_length)
                    ])

                print("%s%s" %
                      ('Qualifying Streamlines by minimum length criterion: ',
                       len(roi_proximal_streamlines)))

                if waymask:
                    roi_proximal_streamlines = roi_proximal_streamlines[
                        utils.near_roi(roi_proximal_streamlines,
                                       np.eye(4),
                                       waymask_data,
                                       tol=roi_neighborhood_tol,
                                       mode='any')]
                    print("%s%s" %
                          ('Qualifying Streamlines by waymask proximity: ',
                           len(roi_proximal_streamlines)))

                # Repeat process until target samples condition is met
                ix = ix + 1
                for s in roi_proximal_streamlines:
                    stream_counter = stream_counter + len(s)
                    streamlines.append(s)
                    if int(stream_counter) >= int(target_samples):
                        break
                    else:
                        continue

                # Cleanup memory
                del seeds, roi_proximal_streamlines, streamline_generator

            del dg

        circuit_ix = circuit_ix + 1
        print(
            "%s%s%s%s%s" %
            ('Completed hyperparameter circuit: ', circuit_ix,
             '...\nCumulative Streamline Count: ', Fore.CYAN, stream_counter))
        print(Style.RESET_ALL)

    print('\n')

    return streamlines
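The wm-gm interface seeding used throughout these examples can be tried on a toy mask; the mask and seed count below are illustrative only, and note that in dipy >= 1.0 the affine is the second positional argument of random_seeds_from_mask rather than a keyword with a default:

import numpy as np
from dipy.tracking import utils

# A single-voxel "interface" mask, purely for illustration.
interface_mask = np.zeros((5, 5, 5), dtype=bool)
interface_mask[2, 2, 2] = True

seeds = utils.random_seeds_from_mask(interface_mask > 0,
                                     seeds_count=10,
                                     seed_count_per_voxel=False,
                                     affine=np.eye(4))
print(seeds.shape)  # (10, 3): world-space seed coordinates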
Code example #8
def track_ensemble(target_samples, atlas_data_wm_gm_int, parcels, mod_fit, tiss_classifier, sphere, directget,
                   curv_thr_list, step_list, track_type, maxcrossing, roi_neighborhood_tol, min_length, waymask,
                   B0_mask, max_length=1000, n_seeds_per_iter=500, pft_back_tracking_dist=2, pft_front_tracking_dist=1,
                   particle_count=15, min_separation_angle=20):
    """
    Perform native-space ensemble tractography, restricted to a vector of ROI masks.

    target_samples : int
        Total number of streamline samples specified to generate streams.
    atlas_data_wm_gm_int : array
        3D int32 numpy array of atlas parcellation intensities from Nifti1Image in T1w-warped native diffusion space,
        restricted to wm-gm interface.
    parcels : list
        List of 3D boolean numpy arrays of atlas parcellation ROI masks from a Nifti1Image in T1w-warped native
        diffusion space.
    mod_fit : obj
        Connectivity reconstruction model.
    tiss_classifier : str
        Tissue classification method.
    sphere : obj
        DiPy object for modeling diffusion directions on a sphere.
    directget : str
        The statistical approach to tracking. Options are: det (deterministic), closest (clos), boot (bootstrapped),
        and prob (probabilistic).
    curv_thr_list : list
        List of integer curvature thresholds used to perform ensemble tracking.
    step_list : list
        List of float step-sizes used to perform ensemble tracking.
    track_type : str
        Tracking algorithm used (e.g. 'local' or 'particle').
    maxcrossing : int
        Maximum number of diffusion directions that can be assumed per voxel while tracking.
    roi_neighborhood_tol : float
        Distance (in the units of the streamlines, usually mm). If any
        coordinate in the streamline is within this distance from the center
        of any voxel in the ROI, the filtering criterion is set to True for
        this streamline, otherwise False. Defaults to the distance between
        the center of each voxel and the corner of the voxel.
    min_length : int
        Minimum fiber length threshold in mm.
    waymask : str
        Path to a Nifti1Image in native diffusion space to constrain tractography.
    B0_mask : str
        File path to B0 brain mask.
    max_length : int
        Maximum number of steps to restrict tracking.
    n_seeds_per_iter : int
        Number of seeds from which to initiate tracking for each unique ensemble combination.
        By default this is set to 500.
    pft_back_tracking_dist : float
        Distance in mm to back track before starting the particle filtering
        tractography. The total particle filtering tractography distance is
        equal to back_tracking_dist + front_tracking_dist. By default this is set to 2 mm.
    pft_front_tracking_dist : float
        Distance in mm to run the particle filtering tractography after the
        back track distance. The total particle filtering tractography
        distance is equal to back_tracking_dist + front_tracking_dist. By
        default this is set to 1 mm.
    particle_count : int
        Number of particles to use in the particle filter.
    min_separation_angle : float
        The minimum angle between directions [0, 90].

    Returns
    -------
    streamlines : ArraySequence
        DiPy list/array-like object of streamline points from tractography.

    References
    ----------
    .. [1] Takemura, H., Caiafa, C. F., Wandell, B. A., & Pestilli, F. (2016).
      Ensemble Tractography. PLoS Computational Biology.
      https://doi.org/10.1371/journal.pcbi.1004692

    """
    import gc
    import time
    from colorama import Fore, Style
    from dipy.tracking import utils
    from dipy.tracking.streamline import Streamlines, select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, ParticleFilteringTracking
    from dipy.direction import (ProbabilisticDirectionGetter, ClosestPeakDirectionGetter,
                                DeterministicMaximumDirectionGetter)

    start = time.time()

    B0_mask_data = nib.load(B0_mask).get_fdata()

    if waymask:
        waymask_data = np.asarray(nib.load(waymask).dataobj).astype('bool')

    # Commence Ensemble Tractography
    parcel_vec = list(np.ones(len(parcels)).astype('bool'))
    streamlines = nib.streamlines.array_sequence.ArraySequence()

    circuit_ix = 0
    stream_counter = 0
    while int(stream_counter) < int(target_samples):
        for curv_thr in curv_thr_list:
            print("%s%s" % ('Curvature: ', curv_thr))

            # Instantiate DirectionGetter
            if directget == 'prob':
                dg = ProbabilisticDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr), sphere=sphere,
                                                               min_separation_angle=min_separation_angle)
            elif directget == 'clos':
                dg = ClosestPeakDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr), sphere=sphere,
                                                             min_separation_angle=min_separation_angle)
            elif directget == 'det':
                dg = DeterministicMaximumDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr), sphere=sphere,
                                                                      min_separation_angle=min_separation_angle)
            else:
                raise ValueError('ERROR: No valid direction getter(s) specified.')

            for step in step_list:
                print("%s%s" % ('Step: ', step))

                # Perform wm-gm interface seeding, using n_seeds at a time
                seeds = utils.random_seeds_from_mask(atlas_data_wm_gm_int > 0, seeds_count=n_seeds_per_iter,
                                                     seed_count_per_voxel=False, affine=np.eye(4))
                if len(seeds) == 0:
                    raise RuntimeWarning('Warning: No valid seed points found in wm-gm interface...')

                # print(seeds)

                # Perform tracking
                if track_type == 'local':
                    streamline_generator = LocalTracking(dg, tiss_classifier, seeds, np.eye(4),
                                                         max_cross=int(maxcrossing), maxlen=int(max_length),
                                                         step_size=float(step), fixedstep=False, return_all=True)
                elif track_type == 'particle':
                    streamline_generator = ParticleFilteringTracking(dg, tiss_classifier, seeds, np.eye(4),
                                                                     max_cross=int(maxcrossing),
                                                                     step_size=float(step),
                                                                     maxlen=int(max_length),
                                                                     pft_back_tracking_dist=pft_back_tracking_dist,
                                                                     pft_front_tracking_dist=pft_front_tracking_dist,
                                                                     particle_count=particle_count,
                                                                     return_all=True)
                else:
                    raise ValueError('ERROR: No valid tracking method(s) specified.')

                # Filter resulting streamlines by those that stay entirely inside the brain
                roi_proximal_streamlines = utils.target(streamline_generator, np.eye(4), B0_mask_data,
                                                        include=True)

                # Filter resulting streamlines by roi-intersection characteristics
                roi_proximal_streamlines = Streamlines(select_by_rois(roi_proximal_streamlines, affine=np.eye(4),
                                                                      rois=parcels, include=parcel_vec,
                                                                      mode='both_end',
                                                                      tol=roi_neighborhood_tol))

                print("%s%s" % ('Filtering by: \nnode intersection: ', len(roi_proximal_streamlines)))

                if str(min_length) != '0':
                    roi_proximal_streamlines = nib.streamlines.array_sequence.ArraySequence([s for s in
                                                                                             roi_proximal_streamlines
                                                                                             if len(s) >=
                                                                                             float(min_length)])

                    print("%s%s" % ('Minimum length criterion: ', len(roi_proximal_streamlines)))

                if waymask:
                    roi_proximal_streamlines = roi_proximal_streamlines[utils.near_roi(roi_proximal_streamlines,
                                                                                       np.eye(4),
                                                                                       waymask_data,
                                                                                       tol=roi_neighborhood_tol,
                                                                                       mode='any')]
                    print("%s%s" % ('Waymask proximity: ', len(roi_proximal_streamlines)))

                out_streams = [s.astype('float32') for s in roi_proximal_streamlines]
                streamlines.extend(out_streams)
                stream_counter = stream_counter + len(out_streams)

                # Cleanup memory
                del seeds, roi_proximal_streamlines, streamline_generator, out_streams
                gc.collect()
            del dg

        circuit_ix = circuit_ix + 1
        print("%s%s%s%s%s%s" % ('Completed Hyperparameter Circuit: ', circuit_ix,
                                '\nCumulative Streamline Count: ', Fore.CYAN, stream_counter, "\n"))
        print(Style.RESET_ALL)

    print('Tracking Complete:\n', str(time.time() - start))

    return streamlines
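The brain-mask filtering step (utils.target with include=True) can likewise be checked on synthetic data; the mask, streamlines, and affine below are illustrative, and the argument order mirrors the dipy >= 1.0 style used above (older releases placed the mask before the affine):

import numpy as np
from dipy.tracking import utils

# Toy "brain mask" plus one streamline inside it and one outside it.
brain_mask = np.zeros((5, 5, 5), dtype=bool)
brain_mask[1:4, 1:4, 1:4] = True
streamlines = [np.array([[1.5, 1.5, 1.5], [2.5, 2.5, 2.5]]),
               np.array([[0., 0., 0.], [0., 0., 4.]])]

inside = list(utils.target(streamlines, np.eye(4), brain_mask, include=True))
print(len(inside))  # expected: 1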
Code example #9
File: segmentation.py  Project: soichih/pyAFQ
def segment(fdata,
            fbval,
            fbvec,
            streamlines,
            bundles,
            reg_template=None,
            mapping=None,
            as_generator=True,
            clip_to_roi=True,
            **reg_kwargs):
    """
    Segment streamlines into bundles.

    Parameters
    ----------
    fdata, fbval, fbvec : str
        Full path to data, bvals, bvecs

    streamlines : list of 2D arrays
        Each array is a streamline, shape (3, N).

    bundles: dict
        The format is something like::

             {'name': {'ROIs':[img, img], 'rules':[True, True]}}

    reg_template : str or nib.Nifti1Image, optional.
        Template to use for registration (defaults to the MNI T2)

    mapping : DiffeomorphicMap object, str or nib.Nifti1Image, optional
        A mapping between DWI space and a template. Defaults to generate this.

    as_generator : bool, optional
        Whether to generate the streamlines here, or return generators.
        Default: True.

    clip_to_roi : bool, optional
        Whether to clip the streamlines between the ROIs
    """
    img, data, gtab, mask = ut.prepare_data(fdata, fbval, fbvec)
    xform_sl = [
        s for s in dtu.move_streamlines(streamlines, np.linalg.inv(img.affine))
    ]

    if reg_template is None:
        reg_template = dpd.read_mni_template()

    if mapping is None:
        mapping = reg.syn_register_dwi(fdata,
                                       gtab,
                                       template=reg_template,
                                       **reg_kwargs)

    if isinstance(mapping, str) or isinstance(mapping, nib.Nifti1Image):
        mapping = reg.read_mapping(mapping, img, reg_template)

    fiber_groups = {}
    for bundle in bundles:
        select_sl = xform_sl
        for ROI, rule in zip(bundles[bundle]['ROIs'],
                             bundles[bundle]['rules']):
            data = ROI.get_data()
            warped_ROI = patch_up_roi(
                mapping.transform_inverse(data, interpolation='nearest'))
            # This function requires lists as inputs:
            select_sl = dts.select_by_rois(select_sl,
                                           [warped_ROI.astype(bool)], [rule])
        # Next, we reorient each streamline according to an ARBITRARY, but
        # CONSISTENT order. To do this, we use the first ROI for which the rule
        # is True as the first one to pass through, and the last ROI for which
        # the rule is True as the last one to pass through:

        # Indices where the 'rule' is True:
        idx = np.where(bundles[bundle]['rules'])

        orient_ROIs = [
            bundles[bundle]['ROIs'][idx[0][0]],
            bundles[bundle]['ROIs'][idx[0][-1]]
        ]

        select_sl = dts.orient_by_rois(select_sl,
                                       orient_ROIs[0].get_data(),
                                       orient_ROIs[1].get_data(),
                                       as_generator=True)

        #  XXX Implement clipping to the ROIs
        #  if clip_to_roi:
        #    dts.clip()

        if as_generator:
            fiber_groups[bundle] = select_sl
        else:
            fiber_groups[bundle] = list(select_sl)

    return fiber_groups
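For context, the bundles argument described in the docstring can be assembled along these lines; the bundle name and ROI contents below are synthetic placeholders, not pyAFQ data:

import numpy as np
import nibabel as nib

# Two synthetic waypoint ROI images (a real pipeline loads anatomical ROIs from disk).
affine = np.eye(4)
roi1 = np.zeros((10, 10, 10), dtype=np.uint8)
roi2 = np.zeros((10, 10, 10), dtype=np.uint8)
roi1[2, 5, 5] = 1
roi2[7, 5, 5] = 1

# The format described in the docstring above: waypoint ROI images plus an
# include/exclude rule per ROI, keyed by bundle name.
bundles = {"EXAMPLE_BUNDLE": {"ROIs": [nib.Nifti1Image(roi1, affine),
                                       nib.Nifti1Image(roi2, affine)],
                              "rules": [True, True]}}
# fiber_groups = segment(fdata, fbval, fbvec, streamlines, bundles)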
Code example #10
def run_tracking(step_curv_combinations, recon_path, n_seeds_per_iter,
                 directget, maxcrossing, max_length, pft_back_tracking_dist,
                 pft_front_tracking_dist, particle_count, roi_neighborhood_tol,
                 waymask, min_length, track_type, min_separation_angle, sphere,
                 tiss_class, tissues4d, cache_dir):

    import gc
    import os
    import h5py
    from dipy.tracking import utils
    from dipy.tracking.streamline import select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, \
        ParticleFilteringTracking
    from dipy.direction import (ProbabilisticDirectionGetter,
                                ClosestPeakDirectionGetter,
                                DeterministicMaximumDirectionGetter)
    from nilearn.image import index_img
    from pynets.dmri.track import prep_tissues
    from nibabel.streamlines.array_sequence import ArraySequence
    from nipype.utils.filemanip import copyfile, fname_presuffix

    recon_path_tmp_path = fname_presuffix(recon_path,
                                          suffix=f"_{step_curv_combinations}",
                                          newpath=cache_dir)
    copyfile(recon_path, recon_path_tmp_path, copy=True, use_hardlink=False)

    if waymask is not None:
        waymask_tmp_path = fname_presuffix(waymask,
                                           suffix=f"_{step_curv_combinations}",
                                           newpath=cache_dir)
        copyfile(waymask, waymask_tmp_path, copy=True, use_hardlink=False)
    else:
        waymask_tmp_path = None

    tissue_img = nib.load(tissues4d)

    # Order:
    B0_mask = index_img(tissue_img, 0)
    atlas_img = index_img(tissue_img, 1)
    atlas_data_wm_gm_int = index_img(tissue_img, 2)
    t1w2dwi = index_img(tissue_img, 3)
    gm_in_dwi = index_img(tissue_img, 4)
    vent_csf_in_dwi = index_img(tissue_img, 5)
    wm_in_dwi = index_img(tissue_img, 6)

    tiss_classifier = prep_tissues(t1w2dwi, gm_in_dwi, vent_csf_in_dwi,
                                   wm_in_dwi, tiss_class, B0_mask)

    B0_mask_data = np.asarray(B0_mask.dataobj).astype("bool")
    atlas_data = np.array(atlas_img.dataobj).astype("uint16")
    atlas_data_wm_gm_int_data = np.asarray(
        atlas_data_wm_gm_int.dataobj).astype("bool").astype("int16")

    # Build mask vector from atlas for later roi filtering
    intensities = [i for i in np.unique(atlas_data) if i != 0]
    parcels = [atlas_data == roi_val for roi_val in intensities]

    del atlas_data

    parcel_vec = list(np.ones(len(parcels)).astype("bool"))

    with h5py.File(recon_path_tmp_path, 'r+') as hf:
        mod_fit = hf['reconstruction'][:].astype('float32')
    hf.close()

    print("%s%s" % ("Curvature: ", step_curv_combinations[1]))

    # Instantiate DirectionGetter
    if directget == "prob" or directget == "probabilistic":
        dg = ProbabilisticDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif directget == "clos" or directget == "closest":
        dg = ClosestPeakDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif directget == "det" or directget == "deterministic":
        maxcrossing = 1
        dg = DeterministicMaximumDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    else:
        raise ValueError("ERROR: No valid direction getter(s) specified.")

    print("%s%s" % ("Step: ", step_curv_combinations[0]))

    # Perform wm-gm interface seeding, using n_seeds at a time
    seeds = utils.random_seeds_from_mask(
        atlas_data_wm_gm_int_data > 0,
        seeds_count=n_seeds_per_iter,
        seed_count_per_voxel=False,
        affine=np.eye(4),
    )
    if len(seeds) == 0:
        print(
            UserWarning("No valid seed points found in wm-gm "
                        "interface..."))
        return None

    # print(seeds)

    # Perform tracking
    if track_type == "local":
        streamline_generator = LocalTracking(
            dg,
            tiss_classifier,
            seeds,
            np.eye(4),
            max_cross=int(maxcrossing),
            maxlen=int(max_length),
            step_size=float(step_curv_combinations[0]),
            fixedstep=False,
            return_all=True,
        )
    elif track_type == "particle":
        streamline_generator = ParticleFilteringTracking(
            dg,
            tiss_classifier,
            seeds,
            np.eye(4),
            max_cross=int(maxcrossing),
            step_size=float(step_curv_combinations[0]),
            maxlen=int(max_length),
            pft_back_tracking_dist=pft_back_tracking_dist,
            pft_front_tracking_dist=pft_front_tracking_dist,
            particle_count=particle_count,
            return_all=True,
        )
    else:
        raise ValueError("ERROR: No valid tracking method(s) specified.")

    # Filter resulting streamlines by those that stay entirely
    # inside the brain
    try:
        roi_proximal_streamlines = utils.target(streamline_generator,
                                                np.eye(4),
                                                B0_mask_data,
                                                include=True)
    except BaseException:
        print('No streamlines found inside the brain! Check registrations.')
        return None

    # Filter resulting streamlines by roi-intersection
    # characteristics

    try:
        roi_proximal_streamlines = \
            nib.streamlines.array_sequence.ArraySequence(
                select_by_rois(
                    roi_proximal_streamlines,
                    affine=np.eye(4),
                    rois=parcels,
                    include=parcel_vec,
                    mode="%s" % ("any" if waymask is not None else
                                 "both_end"),
                    tol=roi_neighborhood_tol,
                )
            )
        print("%s%s" % ("Filtering by: \nNode intersection: ",
                        len(roi_proximal_streamlines)))
    except BaseException:
        print('No streamlines found to connect any parcels! '
              'Check registrations.')
        return None

    try:
        roi_proximal_streamlines = \
            nib.streamlines.array_sequence.ArraySequence(
                [s for s in roi_proximal_streamlines
                 if len(s) >= float(min_length)]
            )
        print(f"Minimum fiber length >{min_length}mm: "
              f"{len(roi_proximal_streamlines)}")
    except BaseException:
        print('No streamlines remaining after minimal length criterion.')
        return None

    if waymask is not None and os.path.isfile(waymask_tmp_path):
        from nilearn.image import math_img
        mask = math_img("img > 0.0075", img=nib.load(waymask_tmp_path))
        waymask_data = np.asarray(mask.dataobj).astype("bool")
        try:
            roi_proximal_streamlines = roi_proximal_streamlines[utils.near_roi(
                roi_proximal_streamlines,
                np.eye(4),
                waymask_data,
                tol=roi_neighborhood_tol,
                mode="all")]
            print("%s%s" %
                  ("Waymask proximity: ", len(roi_proximal_streamlines)))
        except BaseException:
            print('No streamlines remaining in waymask\'s vicinity.')
            return None

    out_streams = [s.astype("float32") for s in roi_proximal_streamlines]

    del dg, seeds, roi_proximal_streamlines, streamline_generator, \
        atlas_data_wm_gm_int_data, mod_fit, B0_mask_data

    os.remove(recon_path_tmp_path)
    gc.collect()

    try:
        return ArraySequence(out_streams)
    except BaseException:
        return None
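
For reference, a minimal self-contained sketch of the parcel-filtering step used above. The toy streamlines, single-voxel parcels, and tolerance are made up for illustration, and `select_by_rois` is called with keyword arguments because its positional order has shifted across dipy versions:

import numpy as np
from dipy.tracking.streamline import select_by_rois

# Two toy streamlines in voxel coordinates (identity affine assumed).
streamlines = [np.array([[0., 0., 0.], [1., 1., 1.], [2., 2., 2.]]),
               np.array([[3., 3., 3.], [3., 3., 4.]])]

# Two single-voxel "parcels", mirroring the parcels/parcel_vec construction above.
parcel_a = np.zeros((5, 5, 5), dtype=bool)
parcel_a[0, 0, 0] = True
parcel_b = np.zeros_like(parcel_a)
parcel_b[2, 2, 2] = True
parcels = [parcel_a, parcel_b]
parcel_vec = [True] * len(parcels)

# mode="any": a streamline is kept if some point of it lies within tol of
# every included ROI; mode="both_end" would instead require the endpoints
# themselves to land near the ROIs.
kept = list(select_by_rois(streamlines, affine=np.eye(4), rois=parcels,
                           include=parcel_vec, mode="any", tol=1.0))
print(len(kept))  # should print 1: only the first streamline touches both parcels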
Code example #11
        # obtain that data array as bool
        sphereNifti = WMA_pyFuncs.createSphere(testRadius, testCentroid,
                                               testT1)
        # add that and a True to the list vector for each
        roisData.append(sphereNifti.get_fdata().astype(bool))
        roisNifti.append(sphereNifti)
        # randomly select include or exclude
        include.append(bool(random.getrandbits(1)))
        operations.append('any')

    # start timing
    t1_start = time.process_time()
    # specify segmentation
    dipySegmented = select_by_rois(testTractogram.streamlines,
                                   testT1.affine,
                                   roisData,
                                   include,
                                   mode='any')
    # actually perform segmentation and get count (can't get indices here for whatever reason)
    dipyCount = len(list(dipySegmented))
    # stop time
    t1_stop = time.process_time()
    # get the elapsed time
    dipyTime = t1_stop - t1_start

    # restart timing
    t1_start = time.process_time()
    # perform segmentation again, but with the modified version;
    # for a valid comparison between these methods we have to split into two
    # operations, since select_by_rois implicitly treats multiple operations
    # in a fairly specific modal fashion
    # (https://github.com/dipy/dipy/blob/8898fc962d5aaf7f7cdbf82b027054070fcef49d/dipy/tracking/streamline.py#L240-L243)
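
One way to read the "split into two operations" note above: since `select_by_rois` applies a single `mode` to every ROI it is given, mixing modes means chaining calls and feeding the survivors of the first selection into the second. The function name and the particular pair of modes below are illustrative only, not taken from the original comparison code:

import numpy as np
from dipy.tracking.streamline import select_by_rois

def chained_selection(streamlines, affine, roi_any, roi_both_end, tol=1.0):
    # First pass: keep streamlines passing anywhere near roi_any.
    stage1 = list(select_by_rois(streamlines, affine=affine, rois=[roi_any],
                                 include=[True], mode="any", tol=tol))
    # Second pass on the survivors: require both endpoints near roi_both_end.
    stage2 = list(select_by_rois(stage1, affine=affine, rois=[roi_both_end],
                                 include=[True], mode="both_end", tol=tol))
    return stage2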
Code example #12
File: segmentation.py  Project: yeatmanlab/pyAFQ
def segment(fdata, fbval, fbvec, streamlines, bundles,
            reg_template=None, mapping=None, as_generator=True, **reg_kwargs):
    """

    generate : bool
        Whether to generate the streamlines here, or return generators.

    reg_template : template to use for registration (defaults to the MNI T2)

    bundles: dict
        The format is something like::

             {'name': {'ROIs':[img, img], 'rules':[True, True]}}


    """
    img, data, gtab, mask = ut.prepare_data(fdata, fbval, fbvec)
    xform_sl = [s for s in dtu.move_streamlines(streamlines,
                                                np.linalg.inv(img.affine))]

    if reg_template is None:
        reg_template = dpd.read_mni_template()

    if mapping is None:
        mapping = reg.syn_register_dwi(fdata, gtab, template=reg_template,
                                       **reg_kwargs)

    if isinstance(mapping, str) or isinstance(mapping, nib.Nifti1Image):
        mapping = reg.read_mapping(mapping, img, reg_template)

    fiber_groups = {}
    for bundle in bundles:
        select_sl = xform_sl
        for ROI, rule in zip(bundles[bundle]['ROIs'],
                             bundles[bundle]['rules']):
            data = ROI.get_data()
            warped_ROI = patch_up_roi(mapping.transform_inverse(
                data,
                interpolation='nearest'))
            # This function requires lists as inputs:
            select_sl = dts.select_by_rois(select_sl,
                                           [warped_ROI.astype(bool)],
                                           [rule])
        # Next, we reorient each streamline according to an ARBITRARY, but
        # CONSISTENT order. To do this, we use the first ROI for which the rule
        # is True as the first one to pass through, and the last ROI for which
        # the rule is True as the last one to pass through:

        # Indices where the 'rule' is True:
        idx = np.where(bundles[bundle]['rules'])

        orient_ROIs = [bundles[bundle]['ROIs'][idx[0][0]],
                       bundles[bundle]['ROIs'][idx[0][-1]]]

        select_sl = dts.orient_by_rois(select_sl,
                                       orient_ROIs[0].get_data(),
                                       orient_ROIs[1].get_data(),
                                       in_place=True)
        if as_generator:
            fiber_groups[bundle] = select_sl
        else:
            fiber_groups[bundle] = list(select_sl)

    return fiber_groups
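
For context, a minimal sketch of the `bundles` dictionary that `segment` expects, following the docstring's format. The ROI images are toy in-memory volumes and the commented-out call uses placeholder file names, not paths from the original project:

import numpy as np
import nibabel as nib

roi_shape = (10, 10, 10)
roi1_data = np.zeros(roi_shape)
roi1_data[2, 2, 2] = 1
roi2_data = np.zeros(roi_shape)
roi2_data[7, 7, 7] = 1
roi1 = nib.Nifti1Image(roi1_data, affine=np.eye(4))
roi2 = nib.Nifti1Image(roi2_data, affine=np.eye(4))

bundles = {"example_bundle": {"ROIs": [roi1, roi2],
                              "rules": [True, True]}}

# fiber_groups = segment("dwi.nii.gz", "dwi.bval", "dwi.bvec",
#                        streamlines, bundles, as_generator=False)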
Code example #13
def track_ensemble(target_samples, atlas_data_wm_gm_int, parcels, parcel_vec,
                   mod_fit, tiss_classifier, sphere, directget, curv_thr_list,
                   step_list, track_type, maxcrossing, max_length,
                   n_seeds_per_iter=200):
    from colorama import Fore, Style
    from dipy.tracking import utils
    from dipy.tracking.streamline import Streamlines, select_by_rois
    from dipy.tracking.local import LocalTracking, ParticleFilteringTracking
    from dipy.direction import (ProbabilisticDirectionGetter,
                                BootDirectionGetter,
                                ClosestPeakDirectionGetter,
                                DeterministicMaximumDirectionGetter)

    # Commence Ensemble Tractography
    streamlines = nib.streamlines.array_sequence.ArraySequence()
    ix = 0
    circuit_ix = 0
    stream_counter = 0
    while int(stream_counter) < int(target_samples):
        for curv_thr in curv_thr_list:
            print("%s%s" % ('Curvature: ', curv_thr))

            # Instantiate DirectionGetter
            if directget == 'prob':
                dg = ProbabilisticDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr),
                                                               sphere=sphere)
            elif directget == 'boot':
                dg = BootDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr),
                                                      sphere=sphere)
            elif directget == 'closest':
                dg = ClosestPeakDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr),
                                                             sphere=sphere)
            elif directget == 'det':
                dg = DeterministicMaximumDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr),
                                                                      sphere=sphere)
            else:
                raise ValueError('ERROR: No valid direction getter(s) specified.')

            for step in step_list:
                print("%s%s" % ('Step: ', step))
                # Perform wm-gm interface seeding, using n_seeds at a time
                seeds = utils.random_seeds_from_mask(atlas_data_wm_gm_int > 0, seeds_count=n_seeds_per_iter,
                                                     seed_count_per_voxel=False, affine=np.eye(4))
                if len(seeds) == 0:
                    raise RuntimeWarning('Warning: No valid seed points found in wm-gm interface...')

                print(seeds)
                # Perform tracking
                if track_type == 'local':
                    streamline_generator = LocalTracking(dg, tiss_classifier, seeds, np.eye(4),
                                                         max_cross=int(maxcrossing), maxlen=int(max_length),
                                                         step_size=float(step), return_all=True)
                elif track_type == 'particle':
                    streamline_generator = ParticleFilteringTracking(dg, tiss_classifier, seeds, np.eye(4),
                                                                     max_cross=int(maxcrossing),
                                                                     step_size=float(step),
                                                                     maxlen=int(max_length),
                                                                     pft_back_tracking_dist=2,
                                                                     pft_front_tracking_dist=1,
                                                                     particle_count=15, return_all=True)
                else:
                    raise ValueError('ERROR: No valid tracking method(s) specified.')

                # Filter resulting streamlines by roi-intersection characteristics
                streamlines_more = Streamlines(select_by_rois(streamline_generator, parcels, parcel_vec.astype('bool'),
                                                              mode='any', affine=np.eye(4), tol=8))

                # Repeat process until target samples condition is met
                ix = ix + 1
                for s in streamlines_more:
                    stream_counter = stream_counter + len(s)
                    streamlines.append(s)
                    if int(stream_counter) >= int(target_samples):
                        break

        circuit_ix = circuit_ix + 1
        print("%s%s%s%s%s" % ('Completed hyperparameter circuit: ', circuit_ix, '...\nCumulative Streamline Count: ',
                              Fore.CYAN, stream_counter))
        print(Style.RESET_ALL)

    print('\n')
    return streamlines