def test_ProbabilisticDirectionGetter():
    # Test the constructors and errors of the ProbabilisticDirectionGetter

    class SillyModel(SphHarmModel):

        sh_order = 4

        def fit(self, data, mask=None):
            coeff = np.zeros(data.shape[:-1] + (15, ))
            return SphHarmFit(self, coeff, mask=None)

    model = SillyModel(gtab=None)
    data = np.zeros((3, 3, 3, 7))
    fit = model.fit(data)

    # Sample point and direction
    point = np.zeros(3)
    dir = unit_octahedron.vertices[0].copy()

    # make a dg from a fit
    dg = ProbabilisticDirectionGetter.from_shcoeff(fit.shm_coeff, 90,
                                                   unit_octahedron)
    state = dg.get_direction(point, dir)
    npt.assert_equal(state, 1)

    # Make a dg from a pmf
    N = unit_octahedron.theta.shape[0]
    pmf = np.zeros((3, 3, 3, N))
    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 90, unit_octahedron)
    state = dg.get_direction(point, dir)
    npt.assert_equal(state, 1)

    # pmf shape must match sphere
    bad_pmf = pmf[..., 1:]
    npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf,
                      bad_pmf, 90, unit_octahedron)

    # pmf must have 4 dimensions
    bad_pmf = pmf[0, ...]
    npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf,
                      bad_pmf, 90, unit_octahedron)
    # pmf cannot have negative values
    pmf[0, 0, 0, 0] = -1
    npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf, pmf,
                      90, unit_octahedron)

    # Check basis_type keyword
    ProbabilisticDirectionGetter.from_shcoeff(fit.shm_coeff,
                                              90,
                                              unit_octahedron,
                                              pmf_threshold=0.1,
                                              basis_type="mrtrix")

    npt.assert_raises(ValueError,
                      ProbabilisticDirectionGetter.from_shcoeff,
                      fit.shm_coeff,
                      90,
                      unit_octahedron,
                      pmf_threshold=0.1,
                      basis_type="not a basis")
def test_ProbabilisticDirectionGetter():
    # Test the constructors and errors of the ProbabilisticDirectionGetter

    class SillyModel(SphHarmModel):

        sh_order = 4

        def fit(self, data, mask=None):
            coeff = np.zeros(data.shape[:-1] + (15,))
            return SphHarmFit(self, coeff, mask=None)

    model = SillyModel(gtab=None)
    data = np.zeros((3, 3, 3, 7))

    # Test if the tracking works on different dtype of the same data.
    for dtype in [np.float32, np.float64]:
        fit = model.fit(data.astype(dtype))

        # Sample point and direction
        point = np.zeros(3)
        dir = unit_octahedron.vertices[0].copy()

        # make a dg from a fit
        dg = ProbabilisticDirectionGetter.from_shcoeff(fit.shm_coeff, 90,
                                                       unit_octahedron)
        state = dg.get_direction(point, dir)
        npt.assert_equal(state, 1)

        # Make a dg from a pmf
        N = unit_octahedron.theta.shape[0]
        pmf = np.zeros((3, 3, 3, N))
        dg = ProbabilisticDirectionGetter.from_pmf(pmf, 90, unit_octahedron)
        state = dg.get_direction(point, dir)
        npt.assert_equal(state, 1)

        # pmf shape must match sphere
        bad_pmf = pmf[..., 1:]
        npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf,
                          bad_pmf, 90, unit_octahedron)

        # pmf must have 4 dimensions
        bad_pmf = pmf[0, ...]
        npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf,
                          bad_pmf, 90, unit_octahedron)
        # pmf cannot have negative values
        pmf[0, 0, 0, 0] = -1
        npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf,
                          pmf, 90, unit_octahedron)

        # Check basis_type keyword
        dg = ProbabilisticDirectionGetter.from_shcoeff(fit.shm_coeff, 90,
                                                       unit_octahedron,
                                                       basis_type="mrtrix")

        npt.assert_raises(ValueError,
                          ProbabilisticDirectionGetter.from_shcoeff,
                          fit.shm_coeff, 90, unit_octahedron,
                          basis_type="not a basis")
Example #3
def test_ProbabilisticOdfWeightedTracker():
    """This tests that the Probabalistic Direction Getter plays nice
    LocalTracking and produces reasonable streamlines in a simple example.
    """
    sphere = HemiSphere.from_sphere(unit_octahedron)

    # A simple image with three possible configurations, a vertical tract,
    # a horizontal tract and a crossing
    pmf_lookup = np.array([[0., 0., 1.], [1., 0., 0.], [0., 1., 0.],
                           [.5, .5, 0.]])
    simple_image = np.array([
        [0, 1, 0, 0, 0, 0],
        [0, 1, 0, 0, 0, 0],
        [0, 3, 2, 2, 2, 0],
        [0, 1, 0, 0, 0, 0],
        [0, 1, 0, 0, 0, 0],
    ])

    simple_image = simple_image[..., None]
    pmf = pmf_lookup[simple_image]

    seeds = [np.array([1., 1., 0.])] * 30

    mask = (simple_image > 0).astype(float)
    tc = ThresholdTissueClassifier(mask, .5)

    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 90, sphere)
    streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.)

    expected = [
        np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.], [2., 2., 0.],
                  [2., 3., 0.], [2., 4., 0.], [2., 5., 0.]]),
        np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.], [3., 1., 0.],
                  [4., 1., 0.]])
    ]

    def allclose(x, y):
        return x.shape == y.shape and np.allclose(x, y)

    path = [False, False]
    for sl in streamlines:
        if allclose(sl, expected[0]):
            path[0] = True
        elif allclose(sl, expected[1]):
            path[1] = True
        else:
            raise AssertionError()
    npt.assert_(all(path))

    # The first path is not possible if 90 degree turns are excluded
    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 80, sphere)
    streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.)

    for sl in streamlines:
        npt.assert_(np.allclose(sl, expected[1]))
def tracking_prob(dir_src, dir_out, verbose=False):

    wm_name = 'wm_mask_' + par_b_tag + '_' + par_dim_tag + '.nii.gz'
    wm_mask, affine = load_nifti(pjoin(dir_src, wm_name), verbose)

    sh_name = 'sh_' + par_b_tag + '_' + par_dim_tag + '.nii.gz'
    sh, _ = load_nifti(pjoin(dir_src, sh_name), verbose)

    sphere = get_sphere('symmetric724') 

    # The threshold classifier is immediately replaced; only the binary
    # classifier is actually used for tracking.
    classifier = ThresholdTissueClassifier(wm_mask.astype('f8'), .5)
    classifier = BinaryTissueClassifier(wm_mask)
    max_dg = ProbabilisticDirectionGetter.from_shcoeff(sh, max_angle=par_trk_max_angle, sphere=sphere)
    seeds = utils.seeds_from_mask(wm_mask, density=2, affine=affine)
    streamlines = LocalTracking(max_dg, classifier, seeds, affine, step_size=par_trk_step_size)
    streamlines = list(streamlines)

    trk_name = 'tractogram_' + par_b_tag + '_' + par_dim_tag + '_' + par_trk_prob_tag + '.trk'
    trk_out = os.path.join(dir_out, trk_name)
 
    save_trk(trk_out, streamlines, affine, wm_mask.shape)

    dpy_out = trk_out.replace('.trk', '.dpy')
    dpy = Dpy(dpy_out, 'w')
    dpy.write_tracks(streamlines)
    dpy.close()
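If the .dpy file written above needs to be read back, the same Dpy class can be opened in read mode; a small hedged sketch, assuming the dipy.io.dpy API used by the writer.

# Hedged read-back of the Dpy file written above (assumes dipy.io.dpy).
from dipy.io.dpy import Dpy
dpy_in = Dpy(dpy_out, 'r')
streamlines_back = dpy_in.read_tracks()
dpy_in.close()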
Example #5
def probal(Threshold=.2,
           data_list=None,
           seed='.',
           one_node=False,
           two_node=False):
    time0 = time.time()
    print("begin loading data, time:", time.time() - time0)

    data = data_list['DWI']
    affine = data_list['affine']
    img = data_list['img']
    labels = data_list['labels']
    gtab = data_list['gtab']
    head_mask = data_list['head_mask']

    if not isinstance(seed, str):
        seed_mask = seed
    else:
        seed_mask = (labels == 2) * (head_mask == 1)

    white_matter = (labels == 2) * (head_mask == 1)
    seeds = utils.seeds_from_mask(seed_mask, affine, density=1)

    print("begin reconstruction, time:", time.time() - time0)
    response, ratio = auto_response_ssst(gtab, data, roi_radii=10, fa_thr=0.7)
    csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=6)
    csd_fit = csd_model.fit(data, mask=white_matter)

    csa_model = CsaOdfModel(gtab, sh_order=6)
    gfa = csa_model.fit(data, mask=white_matter).gfa
    stopping_criterion = ThresholdStoppingCriterion(gfa, Threshold)

    print("begin tracking, time:", time.time() - time0)
    fod = csd_fit.odf(small_sphere)
    pmf = fod.clip(min=0)
    prob_dg = ProbabilisticDirectionGetter.from_pmf(pmf,
                                                    max_angle=30.,
                                                    sphere=small_sphere)
    streamline_generator = LocalTracking(prob_dg,
                                         stopping_criterion,
                                         seeds,
                                         affine,
                                         step_size=.5)
    streamlines = Streamlines(streamline_generator)

    sft = StatefulTractogram(streamlines, img, Space.RASMM)

    if one_node or two_node:
        sft.to_vox()
        streamlines = reduct_seed_ROI(sft.streamlines, seed_mask, one_node,
                                      two_node)
        sft = StatefulTractogram(streamlines, img, Space.VOX)
        sft.to_rasmm()

    print("begin saving, time:", time.time() - time0)

    output = 'tractogram_probabilistic.trk'
    save_trk(sft, output)

    print("finished, time:", time.time() - time0)
Example #6
def lambda_handler(event, context):

  # File-loading logic: default local_debug to False if the event omits it
  try:
    event["local_debug"]
  except KeyError:
    event["local_debug"] = False

  if(event["local_debug"]):
    import cloudpickle as cp
    s3 = boto3.resource('s3')

    f = open("/outputs/gfa_example.cloudpickle","rb")
    shm_coeff, gfa, affine = cp.load(f)
    #shm_coeff, gfa, affine = joblib.load(f)
    f.close()

    f = open("/outputs/seeds.cloudpickle","rb")
    seeds = cp.load(f)
    #seeds = joblib.load(f)
    f.close()

    seeds = [seeds[0]]
  else:
    import pickle
    s3 = boto3.resource('s3')
    #shm_coeff, gfa, affine = cp.loads(\
    #  s3.Object(event["bucket_name"], event["gfa_key"]).get()["Body"].read()\
    #)
    local_file = '/tmp/temp_s3_obj.pkl'
    # boto3: download the object to a local file, then unpickle from it
    # (pickle.loads expects bytes, not a filename)
    s3.Object(event["bucket_name"], event["gfa_key"]).download_file(local_file)
    with open(local_file, 'rb') as fh:
      shm_coeff, gfa, affine = pickle.load(fh)
    seeds = event["seeds"]
  
  print("tracking model")

  classifier = ThresholdTissueClassifier(gfa, .25)

  prob_dg = ProbabilisticDirectionGetter.from_shcoeff(shm_coeff, \
    max_angle=30.,sphere=default_sphere)
  # Materialize the generator so the streamlines can be serialized below
  streamlines = list(LocalTracking(prob_dg, classifier, seeds, affine,
                                   step_size=.5))

  #write and push
  if not event["local_debug"]:
    s3 = boto3.resource('s3')

    f = open("tmp/streamlines.pkl","wb")
    joblib.dump(streamlines,f,compress=True)
    f.close()
    f = open("tmp/streamlines.pkl","rb")
    s3.Object(event["output_bucket"], event["output_key"]).put(Body=f)
    f.close()
    pass
Example #7
def test_save_seeds():
    tissue = np.array([[2, 1, 1, 2, 1],
                       [2, 2, 1, 1, 2],
                       [1, 1, 1, 1, 1],
                       [1, 1, 1, 2, 2],
                       [0, 1, 1, 1, 2],
                       [0, 1, 1, 0, 2],
                       [1, 0, 1, 1, 1]])
    tissue = tissue[None]

    sphere = HemiSphere.from_sphere(unit_octahedron)
    pmf_lookup = np.array([[0., 0., 0., ],
                           [0., 0., 1.]])
    pmf = pmf_lookup[(tissue > 0).astype("int")]

    # Create seeds along the y-axis of the tissue image
    x = np.array([0., 0, 0, 0, 0, 0, 0])
    y = np.array([0., 1, 2, 3, 4, 5, 6])
    z = np.array([1., 1, 1, 0, 1, 1, 1])
    seeds = np.column_stack([x, y, z])

    # Set up tracking
    endpoint_mask = tissue == TissueTypes.ENDPOINT
    invalidpoint_mask = tissue == TissueTypes.INVALIDPOINT
    tc = ActTissueClassifier(endpoint_mask, invalidpoint_mask)
    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 60, sphere)

    # valid streamlines only
    streamlines_generator = LocalTracking(direction_getter=dg,
                                          tissue_classifier=tc,
                                          seeds=seeds,
                                          affine=np.eye(4),
                                          step_size=1.,
                                          return_all=False,
                                          save_seeds=True)

    streamlines_not_all = iter(streamlines_generator)
    # Verify that seeds are returned by the LocalTracker
    _, seed = next(streamlines_not_all)
    npt.assert_equal(seed, seeds[0])
    _, seed = next(streamlines_not_all)
    npt.assert_equal(seed, seeds[1])
    # Verify that seeds are also returned by the PFTTracker
    pft_streamlines = ParticleFilteringTracking(direction_getter=dg,
                                                tissue_classifier=tc,
                                                seeds=seeds,
                                                affine=np.eye(4),
                                                step_size=1.,
                                                max_cross=1,
                                                return_all=False,
                                                save_seeds=True)
    streamlines = iter(pft_streamlines)
    _, seed = next(streamlines)
    npt.assert_equal(seed, seeds[0])
    _, seed = next(streamlines)
    npt.assert_equal(seed, seeds[1])
Example #8
    def _get_direction_getter(self,
                              strategy_name,
                              pam,
                              pmf_threshold=0.1,
                              max_angle=30.):
        """Get Tracking Direction Getter object.

        Parameters
        ----------
        strategy_name : str
            String representing direction getter name.

        Returns
        -------
        direction_getter : instance of DirectionGetter
            Used to get directions for fiber tracking.

        """
        dg, msg = None, ''
        if strategy_name.lower() in ["deterministic", "det"]:
            msg = "Deterministic"
            dg = DeterministicMaximumDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["probabilistic", "prob"]:
            msg = "Probabilistic"
            dg = ProbabilisticDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["closestpeaks", "cp"]:
            msg = "ClosestPeaks"
            dg = ClosestPeakDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["eudx", ]:
            msg = "Eudx"
            dg = pam
        else:
            msg = "No direction getter defined. Deterministic"
            dg = DeterministicMaximumDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)

        logging.info('{0} direction getter strategy selected'.format(msg))
        return dg
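A hypothetical call site for this helper, shown only as a sketch: "workflow" is assumed to be the instance that defines the method and "pam" the result of peaks_from_model; neither name appears in the original listing.

# Hypothetical usage sketch (names assumed, not part of the original listing).
dg = workflow._get_direction_getter("prob", pam, pmf_threshold=0.1,
                                    max_angle=30.)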
Example #9
    def _get_direction_getter(self, strategy_name, pam, pmf_threshold,
                              max_angle):
        """Get Tracking Direction Getter object.

        Parameters
        ----------
        strategy_name : str
            String representing direction getter name.
        pam : instance of PeaksAndMetrics
            An object with ``gfa``, ``peak_directions``, ``peak_values``,
            ``peak_indices``, ``odf``, ``shm_coeffs`` as attributes.
        pmf_threshold : float
            Threshold for ODF functions.
        max_angle : float
            Maximum angle between streamline segments.

        Returns
        -------
        direction_getter : instance of DirectionGetter
            Used to get directions for fiber tracking.

        """
        dg, msg = None, ''
        if strategy_name.lower() in ["deterministic", "det"]:
            msg = "Deterministic"
            dg = DeterministicMaximumDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["probabilistic", "prob"]:
            msg = "Probabilistic"
            dg = ProbabilisticDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["closestpeaks", "cp"]:
            msg = "ClosestPeaks"
            dg = ClosestPeakDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["eudx", ]:
            msg = "Eudx"
            dg = pam
        else:
            msg = "No direction getter defined. Eudx"
            dg = pam

        logging.info('{0} direction getter strategy selected'.format(msg))
        return dg
Example #10
def PFT_tracking(name=None, data_path=None, output_path='.', Threshold=.20):

    time0 = time.time()
    print("begin loading data, time:", time.time() - time0)
    data, affine, img, labels, gtab, head_mask = get_data(name, data_path)

    seed_mask = (labels == 2) * (head_mask == 1)
    white_matter = (labels == 2) * (head_mask == 1)
    seeds = utils.seeds_from_mask(seed_mask, affine, density=1)

    print('begin reconstruction, time:', time.time() - time0)

    response, ratio = auto_response_ssst(gtab, data, roi_radii=10, fa_thr=0.7)
    csd_model = ConstrainedSphericalDeconvModel(gtab, response)
    csd_fit = csd_model.fit(data, mask=white_matter)

    csa_model = CsaOdfModel(gtab, sh_order=6)
    gfa = csa_model.fit(data, mask=white_matter).gfa

    stopping_criterion = ThresholdStoppingCriterion(gfa, Threshold)

    dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit.shm_coeff,
                                                   max_angle=20.,
                                                   sphere=default_sphere)

    #seed_mask = (labels == 2)
    #seed_mask[pve_wm_data < 0.5] = 0
    seeds = utils.seeds_from_mask(seed_mask, affine, density=1)

    #voxel_size = np.average(voxel_size[1:4])
    step_size = 0.2

    #cmc_criterion = CmcStoppingCriterion.from_pve(pve_wm_data,
    #                                              pve_gm_data,
    #                                             pve_csf_data,
    #                                             step_size=step_size,
    #                                              average_voxel_size=voxel_size)

    # Particle Filtering Tractography
    pft_streamline_generator = ParticleFilteringTracking(
        dg,
        stopping_criterion,
        seeds,
        affine,
        max_cross=1,
        step_size=step_size,
        maxlen=1000,
        pft_back_tracking_dist=2,
        pft_front_tracking_dist=1,
        particle_count=15,
        return_all=False)
    streamlines = Streamlines(pft_streamline_generator)
    sft = StatefulTractogram(streamlines, img, Space.RASMM)
    output = output_path + '/tractogram_pft_' + name + '.trk'
Example #11
    def _get_direction_getter(self, strategy_name, pam, pmf_threshold,
                              max_angle):
        """Get Tracking Direction Getter object.

        Parameters
        ----------
        strategy_name: str
            String representing direction getter name.
        pam: instance of PeaksAndMetrics
            An object with ``gfa``, ``peak_directions``, ``peak_values``,
            ``peak_indices``, ``odf``, ``shm_coeffs`` as attributes.
        pmf_threshold : float
            Threshold for ODF functions.
        max_angle : float
            Maximum angle between streamline segments.

        Returns
        -------
        direction_getter : instance of DirectionGetter
            Used to get directions for fiber tracking.

        """
        dg, msg = None, ''
        if strategy_name.lower() in ["deterministic", "det"]:
            msg = "Deterministic"
            dg = DeterministicMaximumDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["probabilistic", "prob"]:
            msg = "Probabilistic"
            dg = ProbabilisticDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["closestpeaks", "cp"]:
            msg = "ClosestPeaks"
            dg = ClosestPeakDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["eudx", ]:
            msg = "Eudx"
            dg = pam
        else:
            msg = "No direction getter defined. Eudx"
            dg = pam

        logging.info('{0} direction getter strategy selected'.format(msg))
        return dg
Example #12
    def _get_direction_getter(self, strategy_name, pam, pmf_threshold=0.1,
                              max_angle=30.):
        """Get Tracking Direction Getter object.

        Parameters
        ----------
        strategy_name : str
            String representing direction getter name.

        Returns
        -------
        direction_getter : instance of DirectionGetter
            Used to get directions for fiber tracking.

        """
        dg, msg = None, ''
        if strategy_name.lower() in ["deterministic", "det"]:
            msg = "Deterministic"
            dg = DeterministicMaximumDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["probabilistic", "prob"]:
            msg = "Probabilistic"
            dg = ProbabilisticDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["closestpeaks", "cp"]:
            msg = "ClosestPeaks"
            dg = ClosestPeakDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)
        elif strategy_name.lower() in ["eudx", ]:
            msg = "Eudx"
            dg = pam
        else:
            msg = "No direction getter defined. Deterministic"
            dg = DeterministicMaximumDirectionGetter.from_shcoeff(
                pam.shm_coeff,
                sphere=pam.sphere,
                max_angle=max_angle,
                pmf_threshold=pmf_threshold)

        logging.info('{0} direction getter strategy selected'.format(msg))
        return dg
def tracking_prob(dir_src, dir_out, verbose=False):

    wm_name = 'wm_mask_' + par_b_tag + '_' + par_dim_tag + '.nii.gz'
    wm_mask, affine = load_nifti(pjoin(dir_src, wm_name), verbose)

    sh_name = 'sh_' + par_b_tag + '_' + par_dim_tag + '.nii.gz'
    sh, _ = load_nifti(pjoin(dir_src, sh_name), verbose)

    sphere = get_sphere('symmetric724') 

    # The threshold classifier is immediately replaced; only the binary
    # classifier is actually used for tracking.
    classifier = ThresholdTissueClassifier(wm_mask.astype('f8'), .5)
    classifier = BinaryTissueClassifier(wm_mask)
    max_dg = ProbabilisticDirectionGetter.from_shcoeff(sh, max_angle=par_trk_max_angle, sphere=sphere)
    seeds = utils.seeds_from_mask(wm_mask, density=2, affine=affine)
    streamlines = LocalTracking(max_dg, classifier, seeds, affine, step_size=par_trk_step_size)
    streamlines = list(streamlines)

    trk_name = 'tractogram_' + par_b_tag + '_' + par_dim_tag + '_' + par_trk_prob_tag + '.trk'
    save_trk(pjoin(dir_out, trk_name), streamlines, affine, wm_mask.shape)
represent the FOD using a discrete sphere. This discrete FOD can be used by the
``ProbabilisticDirectionGetter`` as a PMF for sampling tracking directions. We
need to clip the FOD to use it as a PMF because the latter cannot have negative
values. Ideally, the FOD should be strictly positive, but because of noise
and/or model failures sometimes it can have negative values.
"""

from dipy.direction import ProbabilisticDirectionGetter
from dipy.data import small_sphere
from dipy.io.stateful_tractogram import Space, StatefulTractogram
from dipy.io.streamline import save_trk

fod = csd_fit.odf(small_sphere)
pmf = fod.clip(min=0)
prob_dg = ProbabilisticDirectionGetter.from_pmf(pmf,
                                                max_angle=30.,
                                                sphere=small_sphere)
streamline_generator = LocalTracking(prob_dg,
                                     stopping_criterion,
                                     seeds,
                                     affine,
                                     step_size=.5)
streamlines = Streamlines(streamline_generator)
sft = StatefulTractogram(streamlines, hardi_img, Space.RASMM)
save_trk(sft, "tractogram_probabilistic_dg_pmf.trk")

if has_fury:
    scene = window.Scene()
    scene.add(actor.line(streamlines, colormap.line_colors(streamlines)))
    window.record(scene,
                  out_path='tractogram_probabilistic_dg_pmf.png',
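The fragment above is cut off mid-call. The window, actor, colormap and has_fury names it uses typically come from DIPY's optional visualization module; the import below is an assumed reconstruction of the usual tutorial header, not part of the original listing.

# Presumed imports for the visualization names used above (assumption).
from dipy.viz import window, actor, colormap, has_fury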
Example #15
def test_particle_filtering_tractography():
    """This tests that the ParticleFilteringTracking produces
    more streamlines connecting the gray matter than LocalTracking.
    """
    sphere = get_sphere('repulsion100')
    step_size = 0.2

    # Simple tissue masks
    simple_wm = np.array([[0, 0, 0, 0, 0, 0],
                          [0, 0, 1, 0, 0, 0],
                          [0, 1, 1, 1, 0, 0],
                          [0, 1, 1, 1, 0, 0],
                          [0, 0, 0, 0, 0, 0]])
    simple_wm = np.dstack([np.zeros(simple_wm.shape),
                           simple_wm,
                           simple_wm,
                           simple_wm,
                           np.zeros(simple_wm.shape)])
    simple_gm = np.array([[1, 1, 0, 0, 0, 0],
                          [1, 1, 0, 0, 0, 0],
                          [0, 1, 0, 0, 1, 0],
                          [0, 0, 0, 0, 1, 0],
                          [0, 0, 0, 0, 0, 0]])
    simple_gm = np.dstack([np.zeros(simple_gm.shape),
                           simple_gm,
                           simple_gm,
                           simple_gm,
                           np.zeros(simple_gm.shape)])
    simple_csf = np.ones(simple_wm.shape) - simple_wm - simple_gm
    tc = ActTissueClassifier.from_pve(simple_wm, simple_gm, simple_csf)
    seeds = seeds_from_mask(simple_wm, density=2)

    # Random pmf in every voxel
    shape_img = list(simple_wm.shape)
    shape_img.extend([sphere.vertices.shape[0]])
    np.random.seed(0)  # Random number generator initialization
    pmf = np.random.random(shape_img)

    # Test that PFT recovers at least as many streamlines as LocalTracking
    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 60, sphere)
    local_streamlines_generator = LocalTracking(dg, tc, seeds, np.eye(4),
                                                step_size, max_cross=1,
                                                return_all=False)
    local_streamlines = Streamlines(local_streamlines_generator)

    pft_streamlines_generator = ParticleFilteringTracking(
        dg, tc, seeds, np.eye(4), step_size, max_cross=1, return_all=False,
        pft_back_tracking_dist=1, pft_front_tracking_dist=0.5)
    pft_streamlines = Streamlines(pft_streamlines_generator)

    npt.assert_(np.array([len(pft_streamlines) > 0]))
    npt.assert_(np.array([len(pft_streamlines) >= len(local_streamlines)]))

    # Test that all points are equally spaced
    for l in [1, 2, 5, 10, 100]:
        pft_streamlines = ParticleFilteringTracking(dg, tc, seeds, np.eye(4),
                                                    step_size, max_cross=1,
                                                    return_all=True, maxlen=l)
        for s in pft_streamlines:
            for i in range(len(s) - 1):
                npt.assert_almost_equal(np.linalg.norm(s[i] - s[i + 1]),
                                        step_size)
    # Test that all points are within the image volume
    seeds = seeds_from_mask(np.ones(simple_wm.shape), density=1)
    pft_streamlines_generator = ParticleFilteringTracking(
        dg, tc, seeds, np.eye(4), step_size, max_cross=1, return_all=True)
    pft_streamlines = Streamlines(pft_streamlines_generator)

    for s in pft_streamlines:
        npt.assert_(np.all((s + 0.5).astype(int) >= 0))
        npt.assert_(np.all((s + 0.5).astype(int) < simple_wm.shape))

    # Test that the number of streamlines returned with return_all=True equals
    # the number of seeds placed
    npt.assert_(np.array([len(pft_streamlines) == len(seeds)]))

    # Test non WM seed position
    seeds = [[0, 5, 4], [0, 0, 1], [50, 50, 50]]
    pft_streamlines_generator = ParticleFilteringTracking(
        dg, tc, seeds, np.eye(4), step_size, max_cross=1, return_all=True)
    pft_streamlines = Streamlines(pft_streamlines_generator)

    npt.assert_equal(len(pft_streamlines[0]), 3)  # INVALIDPOINT
    npt.assert_equal(len(pft_streamlines[1]), 3)  # ENDPOINT
    npt.assert_equal(len(pft_streamlines[2]), 1)  # OUTSIDEIMAGE

    # Test with wrong tissueclassifier type
    tc_bin = BinaryTissueClassifier(simple_wm)
    npt.assert_raises(ValueError,
                      lambda: ParticleFilteringTracking(dg, tc_bin, seeds,
                                                        np.eye(4), step_size))
    # Test with invalid back/front tracking distances
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg, tc, seeds, np.eye(4), step_size,
                                          pft_back_tracking_dist=0,
                                          pft_front_tracking_dist=0))
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg, tc, seeds, np.eye(4), step_size,
                                          pft_back_tracking_dist=-1))
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg, tc, seeds, np.eye(4), step_size,
                                          pft_back_tracking_dist=0,
                                          pft_front_tracking_dist=-2))

    # Test with invalid affine shape
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg, tc, seeds, np.eye(3), step_size))

    # Test with invalid maxlen
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg, tc, seeds, np.eye(4), step_size,
                                          maxlen=0))
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg, tc, seeds, np.eye(4), step_size,
                                          maxlen=-1))

    # Test with invalid particle count
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg, tc, seeds, np.eye(4), step_size,
                                          particle_count=0))
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg, tc, seeds, np.eye(4), step_size,
                                          particle_count=-1))

    # Test reproducibility
    tracking_1 = Streamlines(ParticleFilteringTracking(dg, tc, seeds, np.eye(4),
                                                       step_size,
                                                       random_seed=0)).data
    tracking_2 = Streamlines(ParticleFilteringTracking(dg, tc, seeds, np.eye(4),
                                                       step_size,
                                                       random_seed=0)).data
    npt.assert_equal(tracking_1, tracking_2)
Example #16
def test_stop_conditions():
    """This tests that the Local Tracker behaves as expected for the
    following tissue types.
    """
    # StreamlineStatus.TRACKPOINT = 1
    # StreamlineStatus.ENDPOINT = 2
    # StreamlineStatus.INVALIDPOINT = 0
    tissue = np.array([[2, 1, 1, 2, 1], [2, 2, 1, 1, 2], [1, 1, 1, 1, 1],
                       [1, 1, 1, 2, 2], [0, 1, 1, 1, 2], [0, 1, 1, 0, 2],
                       [1, 0, 1, 1, 1], [2, 1, 2, 0, 0]])
    tissue = tissue[None]

    sphere = HemiSphere.from_sphere(unit_octahedron)
    pmf_lookup = np.array([[0., 0., 0.],
                           [0., 0., 1.]])
    pmf = pmf_lookup[(tissue > 0).astype("int")]

    # Create seeds along the y-axis of the tissue image
    x = np.array([0., 0, 0, 0, 0, 0, 0, 0])
    y = np.array([0., 1, 2, 3, 4, 5, 6, 7])
    z = np.array([1., 1, 1, 0, 1, 1, 1, 1])
    seeds = np.column_stack([x, y, z])

    # Set up tracking
    endpoint_mask = tissue == StreamlineStatus.ENDPOINT
    invalidpoint_mask = tissue == StreamlineStatus.INVALIDPOINT
    sc = ActStoppingCriterion(endpoint_mask, invalidpoint_mask)
    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 60, sphere)

    # valid streamlines only
    streamlines_generator = LocalTracking(direction_getter=dg,
                                          stopping_criterion=sc,
                                          seeds=seeds,
                                          affine=np.eye(4),
                                          step_size=1.,
                                          return_all=False)
    streamlines_not_all = iter(streamlines_generator)

    # all streamlines
    streamlines_all_generator = LocalTracking(direction_getter=dg,
                                              stopping_criterion=sc,
                                              seeds=seeds,
                                              affine=np.eye(4),
                                              step_size=1.,
                                              return_all=True)
    streamlines_all = iter(streamlines_all_generator)

    # Check that the first streamline stops at 1 and 2 (ENDPOINT)
    y = 0
    sl = next(streamlines_not_all)
    npt.assert_equal(sl[0], [0, y, 1])
    npt.assert_equal(sl[-1], [0, y, 2])
    npt.assert_equal(len(sl), 2)

    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 1])
    npt.assert_equal(sl[-1], [0, y, 2])
    npt.assert_equal(len(sl), 2)

    # Check that the next streamline stops at 1 and 3 (ENDPOINT)
    y = 1
    sl = next(streamlines_not_all)
    npt.assert_equal(sl[0], [0, y, 1])
    npt.assert_equal(sl[-1], [0, y, 3])
    npt.assert_equal(len(sl), 3)

    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 1])
    npt.assert_equal(sl[-1], [0, y, 3])
    npt.assert_equal(len(sl), 3)

    # This streamline should be the same as above. This row does not have
    # ENDPOINTs, but the streamline should stop at the edge and not include
    # OUTSIDEIMAGE points.
    y = 2
    sl = next(streamlines_not_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 4])
    npt.assert_equal(len(sl), 5)

    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 4])
    npt.assert_equal(len(sl), 5)

    # If we seed on the edge, the first (or last) point in the streamline
    # should be the seed.
    y = 3
    sl = next(streamlines_not_all)
    npt.assert_equal(sl[0], seeds[y])

    sl = next(streamlines_all)
    npt.assert_equal(sl[0], seeds[y])

    # The last 3 seeds should not produce streamlines,
    # INVALIDPOINT streamlines are rejected (return_all=False).
    npt.assert_equal(len(list(streamlines_not_all)), 0)

    # The last 3 seeds should produce invalid streamlines,
    # INVALIDPOINT streamlines are kept (return_all=True).
    # The streamline stops at 1 (INVALIDPOINT) and 3 (ENDPOINT)
    y = 4
    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 1])
    npt.assert_equal(sl[-1], [0, y, 3])
    npt.assert_equal(len(sl), 3)

    # The streamline stops at 0 (INVALIDPOINT) and 2 (INVALIDPOINT)
    y = 5
    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 1])
    npt.assert_equal(sl[-1], [0, y, 2])
    npt.assert_equal(len(sl), 2)

    # The streamline should contain only one point, the seed point,
    # because no valid initial direction was returned.
    y = 6
    sl = next(streamlines_all)
    npt.assert_equal(sl[0], seeds[y])
    npt.assert_equal(sl[-1], seeds[y])
    npt.assert_equal(len(sl), 1)

    # The streamline should contain only one point, the seed point,
    # because there is no valid neighboring voxel (ENDPOINT)
    y = 7
    sl = next(streamlines_all)
    npt.assert_equal(sl[0], seeds[y])
    npt.assert_equal(sl[-1], seeds[y])
    npt.assert_equal(len(sl), 1)
Example #17
def main():
    start = time.time()

    with open('config.json') as config_json:
        config = json.load(config_json)

    # Load the data
    dmri_image = nib.load(config['data_file'])
    dmri = dmri_image.get_data()
    affine = dmri_image.affine
    #aparc_im = nib.load(config['freesurfer'])
    aparc_im = nib.load('volume.nii.gz')
    aparc = aparc_im.get_data()
    end = time.time()
    print('Loaded Files: ' + str((end - start)))
    print(dmri.shape)
    print(aparc.shape)

    # Create the white matter and callosal masks
    start = time.time()
    wm_regions = [
        2, 41, 16, 17, 28, 60, 51, 53, 12, 52, 12, 52, 13, 18, 54, 50, 11, 251,
        252, 253, 254, 255, 10, 49, 46, 7
    ]

    wm_mask = np.zeros(aparc.shape)
    for l in wm_regions:
        wm_mask[aparc == l] = 1
    #np.save('wm_mask',wm_mask)
    #p = os.getcwd()+'wm.json'
    #json.dump(wm_mask, codecs.open(p, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, indent=4)
    #with open('wm_mask.txt', 'wb') as wm:
    #np.savetxt('wm.txt', wm_mask, fmt='%5s')
    #print(wm_mask)
    # Create the gradient table from the bvals and bvecs

    bvals, bvecs = read_bvals_bvecs(config['data_bval'], config['data_bvec'])

    gtab = gradient_table(bvals, bvecs, b0_threshold=100)
    end = time.time()
    print('Created Gradient Table: ' + str((end - start)))

    ##The probabilistic model##
    """
    # Use the Constant Solid Angle (CSA) to find the Orientation Dist. Function
    # Helps orient the wm tracts
    start = time.time()
    csa_model = CsaOdfModel(gtab, sh_order=6)
    csa_peaks = peaks_from_model(csa_model, dmri, default_sphere,
                                 relative_peak_threshold=.8,
                                 min_separation_angle=45,
                                 mask=wm_mask)
    print('Creating CSA Model: ' + str(time.time() - start))
    """
    # Use the SHORE model to find Orientation Dist. Function
    start = time.time()
    shore_model = ShoreModel(gtab)
    shore_peaks = peaks_from_model(shore_model,
                                   dmri,
                                   default_sphere,
                                   relative_peak_threshold=.8,
                                   min_separation_angle=45,
                                   mask=wm_mask)
    print('Creating Shore Model: ' + str(time.time() - start))

    # Begin seeding in the white matter tracts
    seeds = utils.seeds_from_mask(wm_mask, density=[1, 1, 1], affine=affine)
    print('Created White Matter seeds: ' + str(time.time() - start))

    # Create a CSD model to measure Fiber Orientation Dist
    print('Begin the probabilistic model')

    response, ratio = auto_response(gtab, dmri, roi_radius=10, fa_thr=0.7)
    csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=6)
    csd_fit = csd_model.fit(data=dmri, mask=wm_mask)
    print('Created the CSD model: ' + str(time.time() - start))

    # Set the Direction Getter to randomly choose directions

    prob_dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit.shm_coeff,
                                                        max_angle=30.,
                                                        sphere=default_sphere)
    print('Created the Direction Getter: ' + str(time.time() - start))

    # Restrict the white matter tracking
    classifier = ThresholdTissueClassifier(shore_peaks.gfa, .25)

    print('Created the Tissue Classifier: ' + str(time.time() - start))

    # Create the probabilistic model
    streamlines = LocalTracking(prob_dg,
                                tissue_classifier=classifier,
                                seeds=seeds,
                                step_size=.5,
                                max_cross=1,
                                affine=affine)
    print('Created the probabilistic model: ' + str(time.time() - start))

    # Compute streamlines and store as a list.
    streamlines = list(streamlines)
    print('Computed streamlines: ' + str(time.time() - start))

    #from dipy.tracking.streamline import transform_streamlines
    #streamlines = transform_streamlines(streamlines, np.linalg.inv(affine))

    # Create a tractogram from the streamlines and save it
    tractogram = Tractogram(streamlines, affine_to_rasmm=affine)
    save(tractogram, 'track.tck')
    end = time.time()
    print("Created the tck file: " + str((end - start)))
Example #18
def test_ProbabilisticOdfWeightedTracker():
    """This tests that the Probabalistic Direction Getter plays nice
    LocalTracking and produces reasonable streamlines in a simple example.
    """
    sphere = HemiSphere.from_sphere(unit_octahedron)

    # A simple image with three possible configurations, a vertical tract,
    # a horizontal tract and a crossing
    pmf_lookup = np.array([[0., 0., 1.],
                           [1., 0., 0.],
                           [0., 1., 0.],
                           [.5, .5, 0.]])
    simple_image = np.array([[0, 1, 0, 0, 0, 0],
                             [0, 1, 0, 0, 0, 0],
                             [0, 3, 2, 2, 2, 0],
                             [0, 1, 0, 0, 0, 0],
                             [0, 1, 0, 0, 0, 0],
                             ])

    simple_image = simple_image[..., None]
    pmf = pmf_lookup[simple_image]

    seeds = [np.array([1., 1., 0.])] * 30

    mask = (simple_image > 0).astype(float)
    tc = ThresholdTissueClassifier(mask, .5)

    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 90, sphere)
    streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.)

    expected = [np.array([[ 0.,  1.,  0.],
                          [ 1.,  1.,  0.],
                          [ 2.,  1.,  0.],
                          [ 2.,  2.,  0.],
                          [ 2.,  3.,  0.],
                          [ 2.,  4.,  0.],
                          [ 2.,  5.,  0.]]),
                np.array([[ 0.,  1.,  0.],
                          [ 1.,  1.,  0.],
                          [ 2.,  1.,  0.],
                          [ 3.,  1.,  0.],
                          [ 4.,  1.,  0.]])
               ]

    def allclose(x, y):
        return x.shape == y.shape and np.allclose(x, y)

    path = [False, False]
    for sl in streamlines:
        if allclose(sl, expected[0]):
            path[0] = True
        elif allclose(sl, expected[1]):
            path[1] = True
        else:
            raise AssertionError()
    npt.assert_(all(path))

    # The first path is not possible if 90 degree turns are excluded
    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 80, sphere)
    streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.)

    for sl in streamlines:
        npt.assert_(np.allclose(sl, expected[1]))
Example #19
from dipy.reconst.csdeconv import auto_response
from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel

response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7)
csd_model = ConstrainedSphericalDeconvModel(gtab, response)
csd_fit = csd_model.fit(data_small)
csd_fit_shm = np.lib.pad(csd_fit.shm_coeff, ((xa, dshape[0]-xb),
                                             (ya, dshape[1]-yb),
                                             (za, dshape[2]-zb),
                                             (0, 0)), 'constant')

# Probabilistic direction getting for fiber tracking
from dipy.direction import ProbabilisticDirectionGetter

prob_dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit_shm,
                                                    max_angle=30.,
                                                    sphere=default_sphere)

"""
The optic radiation is reconstructed by tracking fibers from the calcarine
sulcus (visual cortex V1) to the lateral geniculate nucleus (LGN). We seed
from the calcarine sulcus by selecting a region-of-interest (ROI) cube of
dimensions 3x3x3 voxels.
"""

# Set a seed region for tractography.
from dipy.tracking import utils

mask = np.zeros(data.shape[:-1], 'bool')
rad = 3
mask[26-rad:26+rad, 29-rad:29+rad, 31-rad:31+rad] = True
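The listing stops here; presumably the ROI mask is then turned into seed points the same way the other examples do. A hedged sketch of that next step (affine is assumed to be the diffusion image affine defined earlier in the full tutorial):

# Presumed continuation (not in the truncated listing): convert the ROI
# mask into seed coordinates for tracking.
seeds = utils.seeds_from_mask(mask, density=[1, 1, 1], affine=affine)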
Example #20
                             mask=mask)

#csd_peaks = peaks_from_model(model=csd_model,
#                             data=data,
#                             sphere=default_sphere,
#                             relative_peak_threshold=relative_peak_threshold,
#                             min_separation_angle=min_separation_angle,
#                             mask=mask)

streamline_eudx = EuDX(csa_peaks.peak_values, csa_peaks.peak_indices,
                            odf_vertices=default_sphere.vertices,
                            a_low=threshold_tissue_classifier, step_sz=step_size, seeds=seeds)

save(streamline_eudx, streamline_eudx.affine, mask.shape, '1.trk', lenght_threshold)

detmax_dg = DeterministicMaximumDirectionGetter.from_shcoeff(csa_peaks.shm_coeff, max_angle=max_angle, sphere=default_sphere)
tensor_model = dti.TensorModel(gtab)
dti_fit = tensor_model.fit(data, mask=mask)
FA = fractional_anisotropy(dti_fit.evals)
classifier = ThresholdTissueClassifier(FA, threshold_tissue_classifier)
streamlines_dmdg = LocalTracking(detmax_dg, classifier, seeds, affine, step_size=step_size)

save(streamlines_dmdg, streamline_eudx.affine, mask.shape, '1.trk', lenght_threshold)

classifier = ThresholdTissueClassifier(csa_peaks.gfa, threshold_tissue_classifier)
prob_dg = ProbabilisticDirectionGetter.from_shcoeff(csa_peaks.shm_coeff, max_angle=max_angle, sphere=default_sphere)
streamlines_pdg = LocalTracking(prob_dg, classifier, seeds, affine, step_size=step_size)

save(streamlines_pdg, streamline_eudx.affine, mask.shape, '1.trk', lenght_threshold)

#M, grouping = connectivity_matrix(streamlines, labels, affine=s_affine, symmetric=True, return_mapping=True, mapping_as_streamlines=True)
Example #21
def run_tracking(step_curv_combinations,
                 recon_shelved,
                 n_seeds_per_iter,
                 traversal,
                 maxcrossing,
                 max_length,
                 pft_back_tracking_dist,
                 pft_front_tracking_dist,
                 particle_count,
                 roi_neighborhood_tol,
                 min_length,
                 track_type,
                 min_separation_angle,
                 sphere,
                 tiss_class,
                 tissue_shelved,
                 verbose=False):
    """
    Run tractography for one (step size, curvature threshold) combination and
    return the ROI- and length-filtered streamlines.

    Parameters
    ----------
    step_curv_combinations : tuple
        A (step size, curvature threshold) pair from which to sample
        streamlines.
    recon_shelved : obj
        Joblib-shelved diffusion reconstruction model (SH coefficients),
        accessed via ``.get()``.
    n_seeds_per_iter : int
        Number of seeds from which to initiate tracking for each unique
        ensemble combination. By default this is set to 250.
    traversal : str
        The statistical approach to tracking. Options are: det (deterministic),
        cp (closest peaks), and prob (probabilistic).
    maxcrossing : int
        Maximum number of diffusion directions that can be assumed per voxel
        while tracking.
    max_length : int
        Maximum number of steps to restrict tracking.
    pft_back_tracking_dist : float
        Distance in mm to back track before starting the particle filtering
        tractography. The total particle filtering tractography distance is
        equal to back_tracking_dist + front_tracking_dist. By default this is
        set to 2 mm.
    pft_front_tracking_dist : float
        Distance in mm to run the particle filtering tractography after the
        back track distance. The total particle filtering tractography
        distance is equal to back_tracking_dist + front_tracking_dist. By
        default this is set to 1 mm.
    particle_count : int
        Number of particles to use in the particle filter.
    roi_neighborhood_tol : float
        Distance (in the units of the streamlines, usually mm). If any
        coordinate in the streamline is within this distance from the center
        of any voxel in the ROI, the filtering criterion is set to True for
        this streamline, otherwise False. Defaults to the distance between
        the center of each voxel and the corner of the voxel.
    min_length : int
        Minimum fiber length threshold in mm to restrict tracking.
    track_type : str
        Tracking algorithm used (e.g. 'local' or 'particle').
    min_separation_angle : float
        The minimum angle between directions [0, 90].
    sphere : obj
        DiPy object for modeling diffusion directions on a sphere.
    tiss_class : str
        Tissue classification method.
    tissue_shelved : obj
        Joblib-shelved 4D Nifti1Image of T1w tissue segmentations in native
        diffusion space, accessed via ``.get()``.
    verbose : bool
        Print timing information. Default is False.

    Returns
    -------
    streamlines : ArraySequence
        DiPy list/array-like object of streamline points from tractography.
    """
    import gc
    import time
    import numpy as np
    from dipy.tracking import utils
    from dipy.tracking.streamline import select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, \
        ParticleFilteringTracking
    from dipy.direction import (ProbabilisticDirectionGetter,
                                ClosestPeakDirectionGetter,
                                DeterministicMaximumDirectionGetter)
    from nilearn.image import index_img, math_img
    from pynets.dmri.utils import generate_seeds, random_seeds_from_mask
    from nibabel.streamlines.array_sequence import ArraySequence

    start_time = time.time()

    if verbose is True:
        print("%s%s%s" % ('Preparing tissue constraints:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()

    tissue_img = tissue_shelved.get()

    # Order of volumes in the 4D tissue image:
    B0_mask = index_img(tissue_img, 0)
    atlas_img = index_img(tissue_img, 1)
    t1w2dwi = index_img(tissue_img, 3)
    gm_in_dwi = index_img(tissue_img, 4)
    vent_csf_in_dwi = index_img(tissue_img, 5)
    wm_in_dwi = index_img(tissue_img, 6)
    tissue_img.uncache()

    tiss_classifier = prep_tissues(t1w2dwi, gm_in_dwi, vent_csf_in_dwi,
                                   wm_in_dwi, tiss_class, B0_mask)

    # if verbose is True:
    #     print("%s%s%s" % (
    #     'Fitting tissue classifier:',
    #     np.round(time.time() - start_time, 1), 's'))
    #     start_time = time.time()

    if verbose is True:
        print("%s%s%s" % ('Loading reconstruction:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()

        print("%s%s" % ("Curvature: ", step_curv_combinations[1]))

    # Instantiate DirectionGetter
    if traversal.lower() in ["probabilistic", "prob"]:
        dg = ProbabilisticDirectionGetter.from_shcoeff(
            recon_shelved.get(),
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif traversal.lower() in ["closestpeaks", "cp"]:
        dg = ClosestPeakDirectionGetter.from_shcoeff(
            recon_shelved.get(),
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif traversal.lower() in ["deterministic", "det"]:
        maxcrossing = 1
        dg = DeterministicMaximumDirectionGetter.from_shcoeff(
            recon_shelved.get(),
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    else:
        raise ValueError("ERROR: No valid direction getter(s) specified.")

    if verbose is True:
        print("%s%s%s" % ('Extracting directions:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()
        print("%s%s" % ("Step: ", step_curv_combinations[0]))

    # Perform wm-gm interface seeding, using n_seeds at a time
    seeds = generate_seeds(
        random_seeds_from_mask(np.asarray(
            math_img("img > 0.01", img=index_img(
                tissue_img, 2)).dataobj).astype("bool").astype("int16") > 0,
                               seeds_count=n_seeds_per_iter,
                               random_seed=42))

    if verbose is True:
        print("%s%s%s" % ('Drawing random seeds:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()
        # print(seeds)

    # Perform tracking
    if track_type == "local":
        streamline_generator = LocalTracking(dg,
                                             tiss_classifier,
                                             np.stack([i for i in seeds]),
                                             np.eye(4),
                                             max_cross=int(maxcrossing),
                                             maxlen=int(max_length),
                                             step_size=float(
                                                 step_curv_combinations[0]),
                                             fixedstep=False,
                                             return_all=True,
                                             random_seed=42)
    elif track_type == "particle":
        streamline_generator = ParticleFilteringTracking(
            dg,
            tiss_classifier,
            np.stack([i for i in seeds]),
            np.eye(4),
            max_cross=int(maxcrossing),
            step_size=float(step_curv_combinations[0]),
            maxlen=int(max_length),
            pft_back_tracking_dist=pft_back_tracking_dist,
            pft_front_tracking_dist=pft_front_tracking_dist,
            pft_max_trial=20,
            particle_count=particle_count,
            return_all=True,
            random_seed=42)
    else:
        raise ValueError("ERROR: No valid tracking method(s) specified.")

    if verbose is True:
        print("%s%s%s" % ('Instantiating tracking:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()
        # print(seeds)

    del dg

    # Filter resulting streamlines by those that stay entirely
    # inside the brain
    try:
        roi_proximal_streamlines = utils.target(
            streamline_generator,
            np.eye(4),
            np.asarray(B0_mask.dataobj).astype('bool'),
            include=True)
    except BaseException:
        print('No streamlines found inside the brain! ' 'Check registrations.')
        #return None

    if verbose is True:
        print("%s%s%s" % ('Drawing streamlines:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()

    del seeds, tiss_classifier, streamline_generator

    B0_mask.uncache()
    atlas_img.uncache()
    t1w2dwi.uncache()
    gm_in_dwi.uncache()
    vent_csf_in_dwi.uncache()
    wm_in_dwi.uncache()
    gc.collect()

    # Filter resulting streamlines by roi-intersection
    # characteristics
    atlas_data = np.array(atlas_img.dataobj).astype("uint16")

    # Build mask vector from atlas for later roi filtering
    parcels = [
        atlas_data == roi_val
        for roi_val in [i for i in np.unique(atlas_data) if i != 0]
    ]

    try:
        roi_proximal_streamlines = select_by_rois(
            roi_proximal_streamlines,
            affine=np.eye(4),
            rois=parcels,
            include=list(np.ones(len(parcels)).astype("bool")),
            mode="any",
            tol=roi_neighborhood_tol,
        )
    except BaseException:
        print('No streamlines found to connect any parcels! '
              'Check registrations.')
        #return None

    del atlas_data

    if verbose is True:
        print("%s%s%s" % ('Selecting by parcellation:',
                          np.round(time.time() - start_time, 1), 's'))
        start_time = time.time()

    del parcels

    gc.collect()

    if verbose is True:
        print("%s%s%s" % ('Selecting by minimum length criterion:',
                          np.round(time.time() - start_time, 1), 's'))

    gc.collect()

    return ArraySequence([
        s.astype("float32") for s in roi_proximal_streamlines
        if len(s) > float(min_length)
    ])
Example #22
def run_tracking(step_curv_combinations, recon_path, n_seeds_per_iter,
                 directget, maxcrossing, max_length, pft_back_tracking_dist,
                 pft_front_tracking_dist, particle_count, roi_neighborhood_tol,
                 waymask, min_length, track_type, min_separation_angle, sphere,
                 tiss_class, tissues4d, cache_dir):

    import gc
    import os
    import h5py
    from dipy.tracking import utils
    from dipy.tracking.streamline import select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, \
        ParticleFilteringTracking
    from dipy.direction import (ProbabilisticDirectionGetter,
                                ClosestPeakDirectionGetter,
                                DeterministicMaximumDirectionGetter)
    from nilearn.image import index_img
    from pynets.dmri.track import prep_tissues
    from nibabel.streamlines.array_sequence import ArraySequence
    from nipype.utils.filemanip import copyfile, fname_presuffix

    recon_path_tmp_path = fname_presuffix(recon_path,
                                          suffix=f"_{step_curv_combinations}",
                                          newpath=cache_dir)
    copyfile(recon_path, recon_path_tmp_path, copy=True, use_hardlink=False)

    if waymask is not None:
        waymask_tmp_path = fname_presuffix(waymask,
                                           suffix=f"_{step_curv_combinations}",
                                           newpath=cache_dir)
        copyfile(waymask, waymask_tmp_path, copy=True, use_hardlink=False)
    else:
        waymask_tmp_path = None

    tissue_img = nib.load(tissues4d)

    # Order of volumes in the 4D tissue image:
    B0_mask = index_img(tissue_img, 0)
    atlas_img = index_img(tissue_img, 1)
    atlas_data_wm_gm_int = index_img(tissue_img, 2)
    t1w2dwi = index_img(tissue_img, 3)
    gm_in_dwi = index_img(tissue_img, 4)
    vent_csf_in_dwi = index_img(tissue_img, 5)
    wm_in_dwi = index_img(tissue_img, 6)

    tiss_classifier = prep_tissues(t1w2dwi, gm_in_dwi, vent_csf_in_dwi,
                                   wm_in_dwi, tiss_class, B0_mask)

    B0_mask_data = np.asarray(B0_mask.dataobj).astype("bool")
    atlas_data = np.array(atlas_img.dataobj).astype("uint16")
    atlas_data_wm_gm_int_data = np.asarray(
        atlas_data_wm_gm_int.dataobj).astype("bool").astype("int16")

    # Build mask vector from atlas for later roi filtering
    parcels = []
    intensities = [i for i in np.unique(atlas_data) if i != 0]
    for roi_val in intensities:
        parcels.append(atlas_data == roi_val)

    del atlas_data

    parcel_vec = list(np.ones(len(parcels)).astype("bool"))

    with h5py.File(recon_path_tmp_path, 'r+') as hf:
        mod_fit = hf['reconstruction'][:].astype('float32')

    print("%s%s" % ("Curvature: ", step_curv_combinations[1]))

    # Instantiate DirectionGetter
    if directget == "prob" or directget == "probabilistic":
        dg = ProbabilisticDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif directget == "clos" or directget == "closest":
        dg = ClosestPeakDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif directget == "det" or directget == "deterministic":
        maxcrossing = 1
        dg = DeterministicMaximumDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    else:
        raise ValueError("ERROR: No valid direction getter(s) specified.")

    print("%s%s" % ("Step: ", step_curv_combinations[0]))

    # Perform wm-gm interface seeding, using n_seeds at a time
    seeds = utils.random_seeds_from_mask(
        atlas_data_wm_gm_int_data > 0,
        seeds_count=n_seeds_per_iter,
        seed_count_per_voxel=False,
        affine=np.eye(4),
    )
    if len(seeds) == 0:
        print(
            UserWarning("No valid seed points found in wm-gm "
                        "interface..."))
        return None

    # print(seeds)

    # Perform tracking
    if track_type == "local":
        streamline_generator = LocalTracking(
            dg,
            tiss_classifier,
            seeds,
            np.eye(4),
            max_cross=int(maxcrossing),
            maxlen=int(max_length),
            step_size=float(step_curv_combinations[0]),
            fixedstep=False,
            return_all=True,
        )
    elif track_type == "particle":
        streamline_generator = ParticleFilteringTracking(
            dg,
            tiss_classifier,
            seeds,
            np.eye(4),
            max_cross=int(maxcrossing),
            step_size=float(step_curv_combinations[0]),
            maxlen=int(max_length),
            pft_back_tracking_dist=pft_back_tracking_dist,
            pft_front_tracking_dist=pft_front_tracking_dist,
            particle_count=particle_count,
            return_all=True,
        )
    else:
        raise ValueError("ERROR: No valid tracking method(s) specified.")

    # Filter resulting streamlines by those that stay entirely
    # inside the brain
    try:
        roi_proximal_streamlines = utils.target(streamline_generator,
                                                np.eye(4),
                                                B0_mask_data,
                                                include=True)
    except BaseException:
        print('No streamlines found inside the brain! ' 'Check registrations.')
        return None

    # Filter resulting streamlines by roi-intersection
    # characteristics

    try:
        roi_proximal_streamlines = \
            nib.streamlines.array_sequence.ArraySequence(
                select_by_rois(
                    roi_proximal_streamlines,
                    affine=np.eye(4),
                    rois=parcels,
                    include=parcel_vec,
                    mode="%s" % ("any" if waymask is not None else
                                 "both_end"),
                    tol=roi_neighborhood_tol,
                )
            )
        print("%s%s" % ("Filtering by: \nNode intersection: ",
                        len(roi_proximal_streamlines)))
    except BaseException:
        print('No streamlines found to connect any parcels! '
              'Check registrations.')
        return None

    try:
        roi_proximal_streamlines = nib.streamlines. \
            array_sequence.ArraySequence(
            [
                s for s in roi_proximal_streamlines
                if len(s) >= float(min_length)
            ]
        )
        print(f"Minimum fiber length >{min_length}mm: "
              f"{len(roi_proximal_streamlines)}")
    except BaseException:
        print('No streamlines remaining after minimal length criterion.')
        return None

    if waymask is not None and os.path.isfile(waymask_tmp_path):
        from nilearn.image import math_img
        mask = math_img("img > 0.0075", img=nib.load(waymask_tmp_path))
        waymask_data = np.asarray(mask.dataobj).astype("bool")
        try:
            roi_proximal_streamlines = roi_proximal_streamlines[utils.near_roi(
                roi_proximal_streamlines,
                np.eye(4),
                waymask_data,
                tol=roi_neighborhood_tol,
                mode="all")]
            print("%s%s" %
                  ("Waymask proximity: ", len(roi_proximal_streamlines)))
        except BaseException:
            print('No streamlines remaining in waymask\'s vicinity.')
            return None

    out_streams = [s.astype("float32") for s in roi_proximal_streamlines]

    del dg, seeds, roi_proximal_streamlines, streamline_generator, \
        atlas_data_wm_gm_int_data, mod_fit, B0_mask_data

    os.remove(recon_path_tmp_path)
    gc.collect()

    try:
        return ArraySequence(out_streams)
    except BaseException:
        return None
Example #23
def fiber_tracking(subject):
    '''
    @param subject: string representing the subject name
    @return: dict holding the streamlines, the stateful tractogram and related
        data for saving and visualization
    '''
    # declare the type of algorithm, one of ['deterministic', 'probabilistic']
    algo = 'deterministic'
    #     algo = 'probabilistic'

    print('processing for', subject)
    fname, bval_fname, bvec_fname, label_fname = get_file_names(subject)

    data, sub_affine, img = load_nifti(fname, return_img=True)
    bvals, bvecs = read_bvals_bvecs(bval_fname, bvec_fname)
    gtab = gradient_table(bvals, bvecs)
    labels = load_nifti_data(label_fname)

    print('data loading complete.\n')
    ##################################################################

    # set mask(s) and seed(s)
    # global_mask = binary_dilation((data[:, :, :, 0] != 0))
    global_mask = binary_dilation((labels == 1) | (labels == 2))
    #     global_mask = binary_dilation((labels == 2) | (labels == 32) | (labels == 76))
    affine = np.eye(4)
    seeds = utils.seeds_from_mask(global_mask, affine, density=1)
    print('mask(s) and seed(s) set complete.\n')
    ##################################################################

    print('getting directions from diffusion dataset...')

    # define tracking mask with Constant Solid Angle (CSA)
    csamodel = CsaOdfModel(gtab, 6)
    stopping_criterion = BinaryStoppingCriterion(global_mask)

    # define direction criterion
    direction_criterion = None
    print('Compute directions...')
    if algo == "deterministic":
        # EuDX
        direction_criterion = peaks.peaks_from_model(
            model=csamodel,
            data=data,
            sphere=peaks.default_sphere,
            relative_peak_threshold=.8,
            min_separation_angle=45,
            mask=global_mask)


#         # Deterministic Algorithm (select direction with max probability)
#         direction_criterion = DeterministicMaximumDirectionGetter.from_shcoeff(
#             csd_fit.shm_coeff,
#             max_angle=30.,
#             sphere=default_sphere)
    else:
        response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7)

        # fit the reconstruction model with Constrained Spherical Deconvolution (CSD)
        csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=6)
        csd_fit = csd_model.fit(data, mask=global_mask)

        #         gfa = csamodel.fit(data, mask=global_mask).gfa
        #     stopping_criterion = ThresholdStoppingCriterion(gfa, .25)

        # Probabilistic Algorithm
        direction_criterion = ProbabilisticDirectionGetter.from_shcoeff(
            csd_fit.shm_coeff, max_angle=30., sphere=default_sphere)

    print('direction computation complete.\n')
    ##################################################################

    print('start tracking process...')
    # start tracking
    streamline_generator = LocalTracking(direction_criterion,
                                         stopping_criterion,
                                         seeds,
                                         affine=affine,
                                         step_size=0.5)

    # Generate streamlines object
    streamlines = Streamlines(streamline_generator)

    sft = StatefulTractogram(streamlines, img, Space.RASMM)

    print('tracking complete.\n')
    ##################################################################

    return {
        "subject": subject,
        "streamlines": streamlines,
        "sft": sft,
        "affine": sub_affine,
        "data": data,
        "img": img,
        "labels": labels
    }
Example #24
def track_ensemble(target_samples, atlas_data_wm_gm_int, parcels, mod_fit, tiss_classifier, sphere, directget,
                   curv_thr_list, step_list, track_type, maxcrossing, roi_neighborhood_tol, min_length, waymask,
                   B0_mask, max_length=1000, n_seeds_per_iter=500, pft_back_tracking_dist=2, pft_front_tracking_dist=1,
                   particle_count=15, min_separation_angle=20):
    """
    Perform native-space ensemble tractography, restricted to a vector of ROI masks.

    Parameters
    ----------
    target_samples : int
        Total number of streamline samples specified to generate streams.
    atlas_data_wm_gm_int : array
        3D int32 numpy array of atlas parcellation intensities from Nifti1Image in T1w-warped native diffusion space,
        restricted to wm-gm interface.
    parcels : list
        List of 3D boolean numpy arrays of atlas parcellation ROI masks from a Nifti1Image in T1w-warped native
        diffusion space.
    mod_fit : obj
        Connectivity reconstruction model.
    tiss_classifier : obj
        Tissue classification method.
    sphere : obj
        DiPy object for modeling diffusion directions on a sphere.
    directget : str
        The statistical approach to tracking. Options are: det (deterministic), closest (clos), boot (bootstrapped),
        and prob (probabilistic).
    curv_thr_list : list
        List of integer curvature thresholds used to perform ensemble tracking.
    step_list : list
        List of float step-sizes used to perform ensemble tracking.
    track_type : str
        Tracking algorithm used (e.g. 'local' or 'particle').
    maxcrossing : int
        Maximum number of diffusion directions that can be assumed per voxel while tracking.
    roi_neighborhood_tol : float
        Distance (in the units of the streamlines, usually mm). If any
        coordinate in the streamline is within this distance from the center
        of any voxel in the ROI, the filtering criterion is set to True for
        this streamline, otherwise False. Defaults to the distance between
        the center of each voxel and the corner of the voxel. (A toy sketch
        of this filtering step follows the function body.)
    min_length : int
        Minimum fiber length threshold in mm.
    waymask : str
        Path to a Nifti1Image in native diffusion space to constrain tractography.
    B0_mask : str
        File path to B0 brain mask.
    max_length : int
        Maximum number of steps to restrict tracking.
    n_seeds_per_iter : int
        Number of seeds from which to initiate tracking for each unique
        ensemble combination. By default this is set to 500.
    pft_back_tracking_dist : float
        Distance in mm to back track before starting the particle filtering
        tractography. The total particle filtering tractography distance is
        equal to back_tracking_dist + front_tracking_dist. By default this is set to 2 mm.
    pft_front_tracking_dist : float
        Distance in mm to run the particle filtering tractography after the
        back track distance. The total particle filtering tractography
        distance is equal to back_tracking_dist + front_tracking_dist. By
        default this is set to 1 mm.
    particle_count : int
        Number of particles to use in the particle filter.
    min_separation_angle : float
        The minimum angle between directions [0, 90].

    Returns
    -------
    streamlines : ArraySequence
        DiPy list/array-like object of streamline points from tractography.

    References
    ----------
    .. [1] Takemura, H., Caiafa, C. F., Wandell, B. A., & Pestilli, F. (2016).
      Ensemble Tractography. PLoS Computational Biology.
      https://doi.org/10.1371/journal.pcbi.1004692

    """
    import gc
    import time
    from colorama import Fore, Style
    from dipy.tracking import utils
    from dipy.tracking.streamline import Streamlines, select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, ParticleFilteringTracking
    from dipy.direction import (ProbabilisticDirectionGetter, ClosestPeakDirectionGetter,
                                DeterministicMaximumDirectionGetter)

    start = time.time()

    B0_mask_data = nib.load(B0_mask).get_fdata()

    if waymask:
        waymask_data = np.asarray(nib.load(waymask).dataobj).astype('bool')

    # Commence Ensemble Tractography
    parcel_vec = list(np.ones(len(parcels)).astype('bool'))
    streamlines = nib.streamlines.array_sequence.ArraySequence()

    circuit_ix = 0
    stream_counter = 0
    while int(stream_counter) < int(target_samples):
        for curv_thr in curv_thr_list:
            print("%s%s" % ('Curvature: ', curv_thr))

            # Instantiate DirectionGetter
            if directget == 'prob':
                dg = ProbabilisticDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr), sphere=sphere,
                                                               min_separation_angle=min_separation_angle)
            elif directget == 'clos':
                dg = ClosestPeakDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr), sphere=sphere,
                                                             min_separation_angle=min_separation_angle)
            elif directget == 'det':
                dg = DeterministicMaximumDirectionGetter.from_shcoeff(mod_fit, max_angle=float(curv_thr), sphere=sphere,
                                                                      min_separation_angle=min_separation_angle)
            else:
                raise ValueError('ERROR: No valid direction getter(s) specified.')

            for step in step_list:
                print("%s%s" % ('Step: ', step))

                # Perform wm-gm interface seeding, using n_seeds at a time
                seeds = utils.random_seeds_from_mask(atlas_data_wm_gm_int > 0, seeds_count=n_seeds_per_iter,
                                                     seed_count_per_voxel=False, affine=np.eye(4))
                if len(seeds) == 0:
                    raise RuntimeWarning('Warning: No valid seed points found in wm-gm interface...')

                # print(seeds)

                # Perform tracking
                if track_type == 'local':
                    streamline_generator = LocalTracking(dg, tiss_classifier, seeds, np.eye(4),
                                                         max_cross=int(maxcrossing), maxlen=int(max_length),
                                                         step_size=float(step), fixedstep=False, return_all=True)
                elif track_type == 'particle':
                    streamline_generator = ParticleFilteringTracking(dg, tiss_classifier, seeds, np.eye(4),
                                                                     max_cross=int(maxcrossing),
                                                                     step_size=float(step),
                                                                     maxlen=int(max_length),
                                                                     pft_back_tracking_dist=pft_back_tracking_dist,
                                                                     pft_front_tracking_dist=pft_front_tracking_dist,
                                                                     particle_count=particle_count,
                                                                     return_all=True)
                else:
                    raise ValueError('ERROR: No valid tracking method(s) specified.')

                # Filter resulting streamlines by those that stay entirely inside the brain
                roi_proximal_streamlines = utils.target(streamline_generator, np.eye(4), B0_mask_data,
                                                        include=True)

                # Filter resulting streamlines by roi-intersection characteristics
                roi_proximal_streamlines = Streamlines(select_by_rois(roi_proximal_streamlines, affine=np.eye(4),
                                                                      rois=parcels, include=parcel_vec,
                                                                      mode='both_end',
                                                                      tol=roi_neighborhood_tol))

                print("%s%s" % ('Filtering by: \nnode intersection: ', len(roi_proximal_streamlines)))

                if str(min_length) != '0':
                    roi_proximal_streamlines = nib.streamlines.array_sequence.ArraySequence([s for s in
                                                                                             roi_proximal_streamlines
                                                                                             if len(s) >=
                                                                                             float(min_length)])

                    print("%s%s" % ('Minimum length criterion: ', len(roi_proximal_streamlines)))

                if waymask:
                    roi_proximal_streamlines = roi_proximal_streamlines[utils.near_roi(roi_proximal_streamlines,
                                                                                       np.eye(4),
                                                                                       waymask_data,
                                                                                       tol=roi_neighborhood_tol,
                                                                                       mode='any')]
                    print("%s%s" % ('Waymask proximity: ', len(roi_proximal_streamlines)))

                out_streams = [s.astype('float32') for s in roi_proximal_streamlines]
                streamlines.extend(out_streams)
                stream_counter = stream_counter + len(out_streams)

                # Cleanup memory
                del seeds, roi_proximal_streamlines, streamline_generator, out_streams
                gc.collect()
            del dg

        circuit_ix = circuit_ix + 1
        print("%s%s%s%s%s%s" % ('Completed Hyperparameter Circuit: ', circuit_ix,
                                '\nCumulative Streamline Count: ', Fore.CYAN, stream_counter, "\n"))
        print(Style.RESET_ALL)

    print('Tracking Complete:\n', str(time.time() - start))

    return streamlines
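
# A toy, self-contained sketch (not part of the original function) of the
# parcel-filtering step described in the docstring above: select_by_rois keeps
# a streamline when any of its points lies within `tol` (in streamline units)
# of a voxel belonging to an included ROI mask. The arrays below are
# hypothetical and only illustrate the mechanics.
import numpy as np
from dipy.tracking.streamline import select_by_rois

toy_streamlines = [np.array([[0., 0., 0.], [1., 1., 1.], [2., 2., 2.]]),
                   np.array([[8., 8., 8.], [9., 9., 9.]])]
toy_parcel = np.zeros((10, 10, 10), dtype=bool)
toy_parcel[1, 1, 1] = True  # a single-voxel "parcel"

kept = list(select_by_rois(toy_streamlines, affine=np.eye(4),
                           rois=[toy_parcel], include=[True],
                           mode="any", tol=1.0))
# Only the first streamline passes near the parcel, so len(kept) == 1.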
Example #25
   White matter volume fraction slice
"""
"""
These discrete fODFs can be used as a PMF in the `ProbabilisticDirectionGetter`
for sampling tracking directions. The PMF must be strictly non-negative;
RUMBA-SD already adheres to this constraint so no further manipulation of the
fODFs is necessary.
"""

from dipy.direction import ProbabilisticDirectionGetter
from dipy.io.stateful_tractogram import Space, StatefulTractogram
from dipy.io.streamline import save_trk

prob_dg = ProbabilisticDirectionGetter.from_pmf(odf,
                                                max_angle=30.,
                                                sphere=sphere)
streamline_generator = LocalTracking(prob_dg,
                                     stopping_criterion,
                                     seeds,
                                     affine,
                                     step_size=.5)
streamlines = Streamlines(streamline_generator)

color = colormap.line_colors(streamlines)
streamlines_actor = actor.streamtube(
    list(transform_streamlines(streamlines, inv(t1_aff))),
    color,
    linewidth=0.1)

vol_actor = actor.slicer(t1_data)
Example #26
def test_probabilistic_odf_weighted_tracker():
    """This tests that the Probabalistic Direction Getter plays nice
    LocalTracking and produces reasonable streamlines in a simple example.
    """
    sphere = HemiSphere.from_sphere(unit_octahedron)

    # A simple image with three possible configurations, a vertical tract,
    # a horizontal tract and a crossing
    pmf_lookup = np.array([[0., 0., 1.],
                           [1., 0., 0.],
                           [0., 1., 0.],
                           [.6, .4, 0.]])
    simple_image = np.array([[0, 1, 0, 0, 0, 0],
                             [0, 1, 0, 0, 0, 0],
                             [0, 3, 2, 2, 2, 0],
                             [0, 1, 0, 0, 0, 0],
                             [0, 1, 0, 0, 0, 0],
                             ])

    simple_image = simple_image[..., None]
    pmf = pmf_lookup[simple_image]

    seeds = [np.array([1., 1., 0.])] * 30

    mask = (simple_image > 0).astype(float)
    tc = ThresholdTissueClassifier(mask, .5)

    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 90, sphere,
                                               pmf_threshold=0.1)
    streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.)

    expected = [np.array([[0., 1., 0.],
                          [1., 1., 0.],
                          [2., 1., 0.],
                          [2., 2., 0.],
                          [2., 3., 0.],
                          [2., 4., 0.],
                          [2., 5., 0.]]),
                np.array([[0., 1., 0.],
                          [1., 1., 0.],
                          [2., 1., 0.],
                          [3., 1., 0.],
                          [4., 1., 0.]])]

    def allclose(x, y):
        return x.shape == y.shape and np.allclose(x, y)

    path = [False, False]
    for sl in streamlines:
        if allclose(sl, expected[0]):
            path[0] = True
        elif allclose(sl, expected[1]):
            path[1] = True
        else:
            raise AssertionError()
    npt.assert_(all(path))

    # The first path is not possible if 90 degree turns are excluded
    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 80, sphere,
                                               pmf_threshold=0.1)
    streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.)

    for sl in streamlines:
        npt.assert_(np.allclose(sl, expected[1]))

    # The first path is not possible if pmf_threshold > 0.67
    # 0.4/0.6 < 2/3, multiplying the pmf should not change the ratio
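    # (the threshold is relative to the largest pmf value in the voxel: the
    #  crossing voxel has pmf [.6, .4, 0], so the .4 direction is discarded
    #  whenever 0.4 < pmf_threshold * 0.6, i.e. pmf_threshold > 2/3, and
    #  scaling the whole pmf by 10 leaves that ratio unchanged)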
    dg = ProbabilisticDirectionGetter.from_pmf(10*pmf, 90, sphere,
                                               pmf_threshold=0.67)
    streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.)

    for sl in streamlines:
        npt.assert_(np.allclose(sl, expected[1]))

    # Test non WM seed position
    seeds = [[0, 0, 0], [5, 5, 5]]
    streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 0.2, max_cross=1,
                                return_all=True)
    streamlines = Streamlines(streamlines)
    npt.assert_(len(streamlines[0]) == 3)  # INVALIDPOINT
    npt.assert_(len(streamlines[1]) == 1)  # OUTSIDEIMAGE

    # Test that all points are within the image volume
    seeds = seeds_from_mask(np.ones(mask.shape), density=2)
    streamline_generator = LocalTracking(dg, tc, seeds, np.eye(4), 0.5,
                                         return_all=True)
    streamlines = Streamlines(streamline_generator)
    for s in streamlines:
        npt.assert_(np.all((s + 0.5).astype(int) >= 0))
        npt.assert_(np.all((s + 0.5).astype(int) < mask.shape))
    # Test that the number of streamlines returned with return_all=True equals
    # the number of seeds placed
    npt.assert_equal(len(streamlines), len(seeds))

    # Test reproducibility
    tracking_1 = Streamlines(LocalTracking(dg, tc, seeds, np.eye(4),
                                           0.5,
                                           random_seed=0)).data
    tracking_2 = Streamlines(LocalTracking(dg, tc, seeds, np.eye(4),
                                           0.5,
                                           random_seed=0)).data
    npt.assert_equal(tracking_1, tracking_2)
Example #27
def run(context):

    ####################################################
    # Get the path to input files  and other parameter #
    ####################################################
    analysis_data = context.fetch_analysis_data()
    settings = analysis_data['settings']
    postprocessing = settings['postprocessing']
    dataset = settings['dataset']

    if dataset == "HCPL":
        dwi_file_handle = context.get_files('input', modality='HARDI')[0]
        dwi_file_path = dwi_file_handle.download('/root/')

        bvalues_file_handle = context.get_files(
            'input', reg_expression='.*prep.bvalues.hcpl.txt')[0]
        bvalues_file_path = bvalues_file_handle.download('/root/')
        bvecs_file_handle = context.get_files(
            'input', reg_expression='.*prep.gradients.hcpl.txt')[0]
        bvecs_file_path = bvecs_file_handle.download('/root/')
    elif dataset == "DSI":
        dwi_file_handle = context.get_files('input', modality='DSI')[0]
        dwi_file_path = dwi_file_handle.download('/root/')
        bvalues_file_handle = context.get_files(
            'input', reg_expression='.*prep.bvalues.txt')[0]
        bvalues_file_path = bvalues_file_handle.download('/root/')
        bvecs_file_handle = context.get_files(
            'input', reg_expression='.*prep.gradients.txt')[0]
        bvecs_file_path = bvecs_file_handle.download('/root/')
    else:
        context.set_progress(message='Wrong dataset parameter')

    inject_file_handle = context.get_files(
        'input', reg_expression='.*prep.inject.nii.gz')[0]
    inject_file_path = inject_file_handle.download('/root/')

    VUMC_ROIs_file_handle = context.get_files(
        'input', reg_expression='.*VUMC_ROIs.nii.gz')[0]
    VUMC_ROIs_file_path = VUMC_ROIs_file_handle.download('/root/')

    ###############################
    # _____ _____ _______     __  #
    # |  __ \_   _|  __ \ \   / / #
    # | |  | || | | |__) \ \_/ /  #
    # | |  | || | |  ___/ \   /   #
    # | |__| || |_| |      | |    #
    # |_____/_____|_|      |_|    #
    #                             #
    ###############################

    ########################################################################################
    #  _______             _          __  __   _______             _     __                #
    # |__   __|           | |        |  \/  | |__   __|           | |   / _|               #
    #    | |_ __ __ _  ___| | ___   _| \  / | ___| |_ __ __ _  ___| | _| |_ __ _  ___ ___  #
    #    | | '__/ _` |/ __| |/ / | | | |\/| |/ __| | '__/ _` |/ __| |/ /  _/ _` |/ __/ _ \ #
    #    | | | | (_| | (__|   <| |_| | |  | | (__| | | | (_| | (__|   <| || (_| | (_|  __/ #
    #    |_|_|  \__,_|\___|_|\_\\__, |_|  |_|\___|_|_|  \__,_|\___|_|\_\_| \__,_|\___\___| #
    #                            __/ |                                                     #
    #                           |___/                                                      #
    #                                                                                      #
    #                                                                                      #
    #                               IronTract Team                                         #
    ########################################################################################

    #################
    # Load the data #
    #################
    dwi_img = nib.load(dwi_file_path)
    bvals, bvecs = read_bvals_bvecs(bvalues_file_path,
                                    bvecs_file_path)
    gtab = gradient_table(bvals, bvecs)

    ############################################
    # Extract the brain mask from the b0 image #
    ############################################
    _, brain_mask = median_otsu(dwi_img.get_data()[:, :, :, 0],
                                median_radius=2, numpass=1)

    ##################################################################
    # Fit the tensor model and compute the fractional anisotropy map #
    ##################################################################
    context.set_progress(message='Processing voxel-wise DTI metrics.')
    tenmodel = TensorModel(gtab)
    tenfit = tenmodel.fit(dwi_img.get_data(), mask=brain_mask)
    FA = fractional_anisotropy(tenfit.evals)
    stopping_criterion = ThresholdStoppingCriterion(FA, 0.2)

    sphere = get_sphere("repulsion724")
    seed_mask_img = nib.load(inject_file_path)
    affine = seed_mask_img.affine
    seeds = utils.random_seeds_from_mask(seed_mask_img.get_data(),
                                         affine,
                                         seed_count_per_voxel=True,
                                         seeds_count=5000)

    if dataset == "HCPL":
        ################################################
        # Compute Fiber Orientation Distribution (CSD) #
        ################################################
        context.set_progress(message='Processing voxel-wise FOD estimation.')

        response, _ = auto_response_ssst(gtab, dwi_img.get_data(),
                                         roi_radii=10, fa_thr=0.7)
        csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=8)
        csd_fit = csd_model.fit(dwi_img.get_data(), mask=brain_mask)
        shm = csd_fit.shm_coeff

        prob_dg = ProbabilisticDirectionGetter.from_shcoeff(shm,
                                                            max_angle=20.,
                                                            sphere=sphere,
                                                            pmf_threshold=0.1)
    elif dataset == "DSI":
        context.set_progress(message='Processing voxel-wise DSI estimation.')
        dsmodel = DiffusionSpectrumModel(gtab)
        dsfit = dsmodel.fit(dwi_img.get_data())
        ODFs = dsfit.odf(sphere)
        prob_dg = ProbabilisticDirectionGetter.from_pmf(ODFs,
                                                        max_angle=20.,
                                                        sphere=sphere,
                                                        pmf_threshold=0.01)

    ###########################################
    # Compute DIPY Probabilistic Tractography #
    ###########################################
    context.set_progress(message='Processing tractography.')
    streamline_generator = LocalTracking(prob_dg, stopping_criterion, seeds,
                                         affine, step_size=.2, max_cross=1)
    streamlines = Streamlines(streamline_generator)
    # sft = StatefulTractogram(streamlines, seed_mask_img, Space.RASMM)
    # streamlines_file_path = "/root/streamlines.trk"
    # save_trk(sft, streamlines_file_path)

    ###########################################################################
    # Compute 3D volumes for the IronTract Challenge. For 'EPFL', we only     #
    # keep streamlines with length > 1mm. We compute the visitation count     #
    # image and apply a small Gaussian smoothing. The Gaussian smoothing      #
    # is especially useful to increase voxel coverage of deterministic        #
    # algorithms. The log of the smoothed visitation count map is then        #
    # iteratively thresholded producing 200 volumes/operation points.         #
    # For VUMC, additional streamline filtering is done using anatomical      #
    # priors (keeping only streamlines that intersect with at least one ROI). #
    ###########################################################################
    if postprocessing in ["EPFL", "ALL"]:
        context.set_progress(message='Processing density map (EPFL)')
        volume_folder = "/root/vol_epfl"
        output_epfl_zip_file_path = "/root/TrackyMcTrackface_EPFL_example.zip"
        os.mkdir(volume_folder)
        lengths = length(streamlines)
        streamlines = streamlines[lengths > 1]
        density = utils.density_map(streamlines, affine, seed_mask_img.shape)
        density = scipy.ndimage.gaussian_filter(density.astype("float32"), 0.5)

        log_density = np.log10(density + 1)
        max_density = np.max(log_density)
        for i, t in enumerate(np.arange(0, max_density, max_density / 200)):
            nbr = str(i)
            nbr = nbr.zfill(3)
            mask = log_density >= t
            vol_filename = os.path.join(volume_folder,
                                        "vol" + nbr + "_t" + str(t) + ".nii.gz")
            nib.Nifti1Image(mask.astype("int32"), affine,
                            seed_mask_img.header).to_filename(vol_filename)
        shutil.make_archive(output_epfl_zip_file_path[:-4], 'zip', volume_folder)

    if postprocessing in ["VUMC", "ALL"]:
        context.set_progress(message='Processing density map (VUMC)')
        ROIs_img = nib.load(VUMC_ROIs_file_path)
        volume_folder = "/root/vol_vumc"
        output_vumc_zip_file_path = "/root/TrackyMcTrackface_VUMC_example.zip"
        os.mkdir(volume_folder)
        lengths = length(streamlines)
        streamlines = streamlines[lengths > 1]

        rois = ROIs_img.get_fdata().astype(int)
        _, grouping = utils.connectivity_matrix(streamlines, affine, rois,
                                                inclusive=True,
                                                return_mapping=True,
                                                mapping_as_streamlines=False)
        streamlines = streamlines[grouping[(0, 1)]]

        density = utils.density_map(streamlines, affine, seed_mask_img.shape)
        density = scipy.ndimage.gaussian_filter(density.astype("float32"), 0.5)

        log_density = np.log10(density + 1)
        max_density = np.max(log_density)
        for i, t in enumerate(np.arange(0, max_density, max_density / 200)):
            nbr = str(i)
            nbr = nbr.zfill(3)
            mask = log_density >= t
            vol_filename = os.path.join(volume_folder,
                                        "vol" + nbr + "_t" + str(t) + ".nii.gz")
            nib.Nifti1Image(mask.astype("int32"), affine,
                            seed_mask_img.header).to_filename(vol_filename)
        shutil.make_archive(output_vumc_zip_file_path[:-4], 'zip', volume_folder)

    ###################
    # Upload the data #
    ###################
    context.set_progress(message='Uploading results...')
    #context.upload_file(fa_file_path, 'fa.nii.gz')
    # context.upload_file(fod_file_path, 'fod.nii.gz')
    # context.upload_file(streamlines_file_path, 'streamlines.trk')
    if postprocessing in ["EPFL", "ALL"]:
        context.upload_file(output_epfl_zip_file_path,
                            'TrackyMcTrackface_' + dataset +'_EPFL.zip')
    if postprocessing in ["VUMC", "ALL"]:
        context.upload_file(output_vumc_zip_file_path,
                            'TrackyMcTrackface_' + dataset +'_VUMC.zip')
Example #28
"""
distribution of small fiber bundles within each voxel. We can use this
distribution for probabilistic fiber tracking. One way to do this is to
represent the FOD using a discrete sphere. This discrete FOD can be used by the
Probabilistic Direction Getter as a PMF for sampling tracking directions. We
need to clip the FOD to use it as a PMF because the latter cannot have negative
values. (Ideally the FOD should be strictly positive, but because of noise
and/or model failures sometimes it can have negative values).
"""

from dipy.direction import ProbabilisticDirectionGetter
from dipy.data import small_sphere
from dipy.io.trackvis import save_trk

fod = csd_fit.odf(small_sphere)
pmf = fod.clip(min=0)
prob_dg = ProbabilisticDirectionGetter.from_pmf(pmf, max_angle=30.0, sphere=small_sphere)
streamlines = LocalTracking(prob_dg, classifier, seeds, affine, step_size=0.5)
save_trk("probabilistic_small_sphere.trk", streamlines, affine, labels.shape)

"""
One disadvantage of using a discrete PMF to represent possible tracking
directions is that it tends to take up a lot of memory (RAM). The size of the
PMF, the FOD in this case, must be equal to the number of possible tracking
directions on the hemisphere, and every voxel has a unique PMF. In this case
the data is ``(81, 106, 76)`` and ``small_sphere`` has 181 directions so the
FOD is ``(81, 106, 76, 181)``. One way to avoid sampling the PMF and holding it
in memory is to build the direction getter directly from the spherical harmonic
representation of the FOD. By using this approach, we can also use a larger
sphere, like ``default_sphere`` which has 362 directions on the hemisphere,
without having to worry about memory limitations.
"""
Example #29
def Analyze(img_d_path, img_s_path, gtab):

    # For fiber tracking, 3 things are needed
    # 1. Method for getting directions
    # 2. Method for identifying different tissue types
    # 3. seeds to begin tracking from

    # Load images
    img_d = nib.load(img_d_path)
    img_s = nib.load(img_s_path)

    print_info = False
    if print_info:
        print(
            "============================  Diffusion  ============================"
        )
        print(img_d)
        print(
            "============================ Structural ============================="
        )
        print(img_s)
        print("Labels:", np.unique(img_s.get_data()).astype('int'))

    # Resize the label (img_s)
    # 0. create an empty array the shape of diffusion image, without
    #    the 4th dimension
    # 1. Convert structural voxel coordinates into ref space (affine)
    # 2. Convert diffusion voxel coordinates into ref space (affine)
    # 3 For each diffusion ref coordinate,
    #   find the closest structural ref coordinate
    # 4. find its corresponding label, then input it to the empty array

    print_info_2 = True
    if print_info_2:
        print(img_d.affine)
        print(img_s.affine)
        print(img_s.shape)
        print(img_d.shape)

    img_d_shape_3D = list(img_d.shape[:3])
    img_s_shape_3D = img_s.shape

    #raise ValueError(" ")

    img_d_affine = img_d.affine
    img_s_affine = img_s.affine

    img_s_data = img_s.get_data()
    img_d_data = img_d.get_data()

    Vox_coord_s_i = np.arange(img_s_shape_3D[0])
    Vox_coord_s_j = np.arange(img_s_shape_3D[1])
    Vox_coord_s_k = np.arange(img_s_shape_3D[2])
    Ref_coord_s_i = Vox_coord_s_i * img_s_affine[0, 0] + img_s_affine[0, 3]
    Ref_coord_s_j = Vox_coord_s_j * img_s_affine[1, 1] + img_s_affine[1, 3]
    Ref_coord_s_k = Vox_coord_s_k * img_s_affine[2, 2] + img_s_affine[2, 3]
    #print(Ref_coord_s_j)

    reduced_size_label = np.zeros(img_d_shape_3D)

    for i in range(img_d_shape_3D[0]):
        for j in range(img_d_shape_3D[1]):
            for k in range(img_d_shape_3D[2]):
                # convert to reference coordinate
                ref_coord_i = i * img_d_affine[0, 0] + img_d_affine[0, 3]
                ref_coord_j = j * img_d_affine[1, 1] + img_d_affine[1, 3]
                ref_coord_k = k * img_d_affine[2, 2] + img_d_affine[2, 3]

                min_i_ind = bisect.bisect_left(np.sort(Ref_coord_s_i),
                                               ref_coord_i)
                min_j_ind = bisect.bisect_left(Ref_coord_s_j, ref_coord_j)
                min_k_ind = bisect.bisect_left(Ref_coord_s_k, ref_coord_k)
                #print(min_i_ind,min_j_ind,min_k_ind)
                #print(img_s_data[260-1-min_i_ind][311-1-min_j_ind][260-1-min_k_ind])
                #reduced_size_label[i][j][k]=img_s_data[260-1-min_i_ind][311-1-min_j_ind][260-1-min_k_ind]
                reduced_size_label[i][j][k] = img_s_data[260 - 1 - min_i_ind,
                                                         min_j_ind, min_k_ind]
    print("Label image reduction successful")

    # Divide Brainstem
    #msk_Midbrain
    yy, xx, zz = np.meshgrid(np.arange(174), np.arange(145), np.arange(145))

    pon_midbrain_msk = yy > (-115 / 78) * zz + 115
    midbrain_msk = zz > 48

    BS_msk = reduced_size_label == 16
    reduced_size_label_BS_seg = np.copy(reduced_size_label)
    reduced_size_label_BS_seg[BS_msk * pon_midbrain_msk] = 90
    reduced_size_label_BS_seg[BS_msk * midbrain_msk] = 120

    plt.figure(figsize=[11, 8.5])
    msk = reduced_size_label > 200
    temp_reduced_size_label = np.copy(reduced_size_label_BS_seg)
    temp_reduced_size_label[msk] = 0
    plt.imshow(temp_reduced_size_label[72, :, :], origin='lower')

    msk = reduced_size_label == 16
    temp_reduced_size_label = np.copy(reduced_size_label)
    temp_reduced_size_label[~msk] = 0
    plt.figure(figsize=[11, 8.5])
    plt.imshow(temp_reduced_size_label[72, :, :], origin='lower')

    #print("image display complete")
    #input1=raw_input("stopping")
    T1_path = "C:\\Users\\gham\\Desktop\\Human Brain\\Data\\102109\\102109_3T_Diffusion_preproc\\102109\\T1w\\"
    T1_file = "T1w_acpc_dc_restore_1.25.nii.gz"
    T1 = nib.load(T1_path + T1_file)
    T1_data = T1.get_data()
    plt.figure(figsize=[11, 8.5])
    plt.imshow(T1_data[72, :, :], origin='lower')
    plt.show()

    # implement the modified label
    reduced_size_label = reduced_size_label_BS_seg
    #raise ValueError("========== Stop =============")

    #White matter mask
    left_cerebral_wm = reduced_size_label == 2
    right_cerebral_wm = reduced_size_label == 41
    cerebral_wm = left_cerebral_wm + right_cerebral_wm
    left_cerebellum_wm = reduced_size_label == 7
    right_cerebellum_wm = reduced_size_label == 46
    cerebellum_wm = left_cerebellum_wm + right_cerebellum_wm

    CC = np.zeros(reduced_size_label.shape)
    for i in [251, 252, 253, 254, 255]:
        CC += reduced_size_label == i

    left_cortex = np.zeros(reduced_size_label.shape)
    for i in np.arange(1000, 1036):
        left_cortex += reduced_size_label == i
    right_cortex = np.zeros(reduced_size_label.shape)
    for i in np.arange(2000, 2036):
        right_cortex += reduced_size_label == i

    extra = np.zeros(reduced_size_label.shape)
    for i in [
            4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 90, 120, 17, 18, 24, 26, 28,
            30, 31, 43, 44, 46, 47, 49, 50, 51, 52, 53, 54, 58, 60, 62, 63, 77,
            80, 85
    ]:
        extra += reduced_size_label == i
    #for i in np.arange(1001,1035):
    #    extra+=reduced_size_label==i

    wm = cerebral_wm + cerebellum_wm + CC + extra + left_cortex + right_cortex

    #seed_mask1=np.zeros(reduced_size_label.shape)
    #for i in [16]:
    #    seed_mask1+=reduced_size_label==i
    #seed_mask2=np.zeros(reduced_size_label.shape)

    #seed_mask=seed_mask1+seed_mask2

    #seed_mask=(reduced_size_label==16)+(reduced_size_label==2)+(reduced_size_label==41)
    #seeds = utils.seeds_from_mask(seed_mask, density=1, affine=img_d_affine)
    seeds = utils.seeds_from_mask(wm, density=1, affine=img_d_affine)

    # Constrained Spherical Deconvolution
    #reference: https://www.imagilys.com/constrained-spherical-deconvolution-CSD-tractography/
    csd_model = ConstrainedSphericalDeconvModel(gtab, None, sh_order=6)
    csd_fit = csd_model.fit(img_d_data, mask=wm)

    print("CSD model complete")

    # reconstruction
    from dipy.reconst.shm import CsaOdfModel

    csa_model = CsaOdfModel(gtab, sh_order=6)
    gfa = csa_model.fit(img_d_data, mask=wm).gfa
    classifier = ThresholdTissueClassifier(gfa, .25)
    # =============================================================================
    #     import dipy.reconst.dti as dti
    #     from dipy.reconst.dti import fractional_anisotropy
    #     tensor_model = dti.TensorModel(gtab)
    #     tenfit=tensor_model.fit(img_d_data,mask=wm) #COMPUTATIONALL INTENSE
    #     FA=fractional_anisotropy(tenfit.evals)
    #     classifier=ThresholdTissueClassifier(FA,.1) # 0.2 enough?
    # =============================================================================

    print("Classifier complete")

    # Probabilistic direction getter
    from dipy.direction import ProbabilisticDirectionGetter
    from dipy.data import small_sphere
    from dipy.io.streamline import save_trk

    fod = csd_fit.odf(small_sphere)
    pmf = fod.clip(min=0)
    prob_dg = ProbabilisticDirectionGetter.from_pmf(pmf,
                                                    max_angle=75.,
                                                    sphere=small_sphere)
    streamlines_generator = LocalTracking(prob_dg,
                                          classifier,
                                          seeds,
                                          img_d_affine,
                                          step_size=.5)
    save_trk("probabilistic_small_sphere.trk", streamlines_generator,
             img_d_affine, reduced_size_label.shape)

    astreamlines = np.array(list(streamlines_generator))
    endpoints = np.array(
        [st[0::len(st) - 1] for st in astreamlines if len(st) > 1])

    print(endpoints)
    with open('endpoints-shorder=6-maxangle=75-gfa=0.25-BSdiv-v3.pkl',
              'wb') as f:
        pickle.dump(endpoints, f)
    with open("reduced_label-shorder=6-maxangle=75-gfa=0.25-BSdiv-v3.pkl",
              "wb") as g:
        pickle.dump(reduced_size_label, g)
Example #30
def test_ProbabilisticDirectionGetter():
    # Test the constructors and errors of the ProbabilisticDirectionGetter

    class SillyModel(SphHarmModel):

        sh_order = 4

        def fit(self, data, mask=None):
            coeff = np.zeros(data.shape[:-1] + (15,))
            return SphHarmFit(self, coeff, mask=None)

    model = SillyModel(gtab=None)
    data = np.zeros((3, 3, 3, 7))

    # Test if the tracking works on different dtype of the same data.
    for dtype in [np.float32, np.float64]:
        fit = model.fit(data.astype(dtype))

        # Sample point and direction
        point = np.zeros(3)
        dir = unit_octahedron.vertices[0].copy()

        # make a dg from a fit
        with warnings.catch_warnings():
            warnings.filterwarnings(
                "ignore", message=descoteaux07_legacy_msg,
                category=PendingDeprecationWarning)
            dg = ProbabilisticDirectionGetter.from_shcoeff(
                fit.shm_coeff, 90, unit_octahedron)

        state = dg.get_direction(point, dir)
        npt.assert_equal(state, 1)

        # Make a dg from a pmf
        N = unit_octahedron.theta.shape[0]
        pmf = np.zeros((3, 3, 3, N))
        dg = ProbabilisticDirectionGetter.from_pmf(pmf, 90, unit_octahedron)
        state = dg.get_direction(point, dir)
        npt.assert_equal(state, 1)

        # pmf shape must match sphere
        bad_pmf = pmf[..., 1:]
        npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf,
                          bad_pmf, 90, unit_octahedron)

        # pmf must have 4 dimensions
        bad_pmf = pmf[0, ...]
        npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf,
                          bad_pmf, 90, unit_octahedron)
        # pmf cannot have negative values
        pmf[0, 0, 0, 0] = -1
        npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf,
                          pmf, 90, unit_octahedron)

        # Check basis_type keyword
        with warnings.catch_warnings():
            warnings.filterwarnings(
                "ignore", message=tournier07_legacy_msg,
                category=PendingDeprecationWarning)

            dg = ProbabilisticDirectionGetter.from_shcoeff(
                fit.shm_coeff, 90, unit_octahedron, basis_type="tournier07")

        npt.assert_raises(ValueError,
                          ProbabilisticDirectionGetter.from_shcoeff,
                          fit.shm_coeff, 90, unit_octahedron,
                          basis_type="not a basis")
	def _run_interface(self, runtime):
		import numpy as np
		import nibabel as nib
		from dipy.io import read_bvals_bvecs
		from dipy.core.gradients import gradient_table
		from nipype.utils.filemanip import split_filename

		# Loading the data
		fname = self.inputs.in_file
		img = nib.load(fname)
		data = img.get_data()
		affine = img.get_affine()

		FA_fname = self.inputs.FA_file
		FA_img = nib.load(FA_fname)
		fa = FA_img.get_data()
		affine = FA_img.get_affine()
		affine = np.matrix.round(affine)

		mask_fname = self.inputs.brain_mask
		mask_img = nib.load(mask_fname)
		mask = mask_img.get_data()

		bval_fname = self.inputs.bval
		bvals = np.loadtxt(bval_fname)

		bvec_fname = self.inputs.bvec
		bvecs = np.loadtxt(bvec_fname)
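		# bvec files typically store one gradient component per row (3 x N);
		# transpose to the (N, 3) layout expected by gradient_table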
		bvecs = np.vstack([bvecs[0,:],bvecs[1,:],bvecs[2,:]]).T
		gtab = gradient_table(bvals, bvecs)

		# Creating a white matter mask
		fa = fa*mask
		white_matter = fa >= 0.2

		# Creating a seed mask
		from dipy.tracking import utils
		seeds = utils.seeds_from_mask(white_matter, density=[2, 2, 2], affine=affine)

		# Fitting the CSA model
		from dipy.reconst.shm import CsaOdfModel
		from dipy.data import default_sphere
		from dipy.direction import peaks_from_model
		csa_model = CsaOdfModel(gtab, sh_order=8)
		csa_peaks = peaks_from_model(csa_model, data, default_sphere,
		                             relative_peak_threshold=.8,
		                             min_separation_angle=45,
		                             mask=white_matter)

		from dipy.tracking.local import ThresholdTissueClassifier
		classifier = ThresholdTissueClassifier(csa_peaks.gfa, .25)

		# CSD model
		from dipy.reconst.csdeconv import (ConstrainedSphericalDeconvModel, auto_response)
		response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7)
		csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=8)
		csd_fit = csd_model.fit(data, mask=white_matter)

		from dipy.direction import ProbabilisticDirectionGetter
		prob_dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit.shm_coeff,
		                                                    max_angle=45.,
		                                                    sphere=default_sphere)

		# Tracking
		from dipy.tracking.local import LocalTracking
		streamlines = LocalTracking(prob_dg, classifier, seeds, affine,
		                            step_size=.5, maxlen=200, max_cross=1)

		# Compute streamlines and store as a list.
		streamlines = list(streamlines)

		# Saving the trackfile
		from dipy.io.trackvis import save_trk
		_, base, _ = split_filename(fname)
		save_trk(base + '_CSDprob.trk', streamlines, affine, fa.shape)

		return runtime
Example #32
def tracking(shm_file, mask_file, outdir, force_overwrite, particles,
             step_size, max_lenght, max_angle, algorithm, wpid_seeds_info):
    ''' Tracking function that will run in parallel

        Params:
            shm_file: SHM file computed from the dwi file
            mask_file: mask where to perform tractography
            outdir: Directory where to save streamlines
            force_overwrite: if True, existing files will be overwritten
            particles: number of streamlines to keep per seed point
            step_size: size in mm of each step in the tracking
            max_lenght: maximum length of each streamline
            max_angle: maximum angle at each step of tracking
            algorithm: either 'probabilistic' or 'deterministic'
            wpid_seeds_info: tuple which contains:
                - wpid: The id of this worker
                - seeds: One list for each seed with points to track from
                - info: CIFTI information for each seed:
                    -mtype: A valid CIFTI MODELTYPE
                    -name: A valid CIFTI BRAINSTRUCTURE
                    -coord: Voxel or vertex to which the seed makes reference
                    -size: size of the CIFTI SURFACE (if applies)
        Returns:
            None. Streamlines are saved to <outdir>/stream_<wpid>.trk '''
    import citrix
    import streamlines as sl

    from dipy.data import default_sphere
    from dipy.tracking.local_tracking import LocalTracking
    from dipy.tracking.stopping_criterion import BinaryStoppingCriterion
    # Assumed aliases for the DIPY direction getters used below; the snippet
    # does not show where `probabilistic` and `deterministic` come from.
    from dipy.direction import ProbabilisticDirectionGetter as probabilistic
    from dipy.direction import \
        DeterministicMaximumDirectionGetter as deterministic

    wpid, (seeds, cifti_info) = wpid_seeds_info

    logging.debug("Worker {} started".format(wpid))

    # Check if file exists
    outfile = os.path.join(outdir, "stream_{}.trk".format(wpid))

    if os.path.isfile(outfile) and not force_overwrite:
        print("File already exists, use the -f flag to overwrite it")
        return

    shm = citrix.load(shm_file)
    shm_data = shm.get_data()

    mask_nib = citrix.load(mask_file)
    mask = mask_nib.get_data()

    if algorithm == 'deterministic':
        directions = deterministic.from_shcoeff(shm_data, max_angle,
                                                default_sphere)
    else:
        directions = probabilistic.from_shcoeff(shm_data, max_angle,
                                                default_sphere)

    stop_criterion = BinaryStoppingCriterion(mask)

    percent = max(1, len(seeds) // 5)
    streamlines = []
    used_seeds = []

    for i, s in enumerate(seeds):
        if i % percent == 0:
            logging.debug("{}, {}/{} seeds".format(wpid, i, len(seeds)))

        # Repeat each seed point as many times as needed
        if len(s) == 3:
            # It's one point
            s = [s]

        repeated_seeds = [ss for ss in s for _ in range(2 * particles)]

        res = LocalTracking(directions,
                            stop_criterion,
                            repeated_seeds,
                            shm.affine,
                            step_size=step_size,
                            maxlen=max_lenght,
                            return_all=False)

        for streamline in itertools.islice(res, particles * len(s)):
            if streamline is not None and len(streamline) > 1:
                streamlines.append(streamline)

            if cifti_info[i][0] == 'CIFTI_MODEL_TYPE_SURFACE':
                used_seeds.append(cifti_info[i][2])
            else:
                used_seeds.append([int(cf) for cf in cifti_info[i][2]])

    streamlines = sl.Streamlines(streamlines, shm.affine, shm.shape[:3],
                                 shm.header.get_zooms()[:3])

    numpy.savetxt(os.path.join(outdir, "info_{}.txt".format(wpid)), used_seeds)

    sl.io.save(streamlines, outfile)

    logging.debug("Worker {} finished".format(wpid))
    return
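
# --- Illustration (not part of the original snippet) -------------------------
# The ``wpid_seeds_info`` argument documented above is a
# (wpid, (seeds, cifti_info)) tuple. Below is a minimal, hypothetical sketch of
# what one worker receives; every coordinate and CIFTI value is made up purely
# for illustration.
example_seeds = [[[34.0, 52.0, 27.0]],                      # seed 0: one point
                 [[10.0, 20.0, 15.0], [10.5, 20.0, 15.0]]]  # seed 1: two points
example_cifti_info = [
    ('CIFTI_MODEL_TYPE_VOXELS', 'CIFTI_STRUCTURE_BRAIN_STEM',
     (34, 52, 27), None),
    ('CIFTI_MODEL_TYPE_SURFACE', 'CIFTI_STRUCTURE_CORTEX_LEFT',
     1234, 32492),
]
example_wpid_seeds_info = (0, (example_seeds, example_cifti_info))
# Hypothetical call (the file paths and output directory would come from the
# caller, which is not shown in this snippet):
# tracking('shm.nii.gz', 'mask.nii.gz', 'out', False, 5, 0.5, 250, 30.0,
#          'probabilistic', example_wpid_seeds_info)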
Example #33
det_streamline_generator = LocalTracking(pam,
                                         cmc_classifier,
                                         seeds,
                                         affine,
                                         step_size=step_size)

# The line below is failing not sure why
# detstreamlines = Streamlines(det_streamline_generator)

detstreamlines = list(det_streamline_generator)
detstreamlines = Streamlines(detstreamlines)
save_trk('det.trk', detstreamlines, affine=np.eye(4),
         vox_size=vox_size, shape=shape)

dg = ProbabilisticDirectionGetter.from_shcoeff(pam.shm_coeff,
                                               max_angle=20.,
                                               sphere=sphere)

# Particle Filtering Tractography
pft_streamline_generator = ParticleFilteringTracking(dg,
                                                     cmc_classifier,
                                                     seeds,
                                                     affine,
                                                     max_cross=1,
                                                     step_size=step_size,
                                                     maxlen=1000,
                                                     pft_back_tracking_dist=2,
                                                     pft_front_tracking_dist=1,
                                                     particle_count=15,
                                                     return_all=False)
# The line below is failing not sure why
Example #34
    def particle_tracking(self):

        self.sphere = get_sphere("repulsion724")
        if self.mod_type == "det":
            maxcrossing = 1
            print("Obtaining peaks from model...")
            self.mod_peaks = peaks_from_model(
                self.mod,
                self.data,
                self.sphere,
                relative_peak_threshold=0.5,
                min_separation_angle=25,
                mask=self.wm_in_dwi_data,
                npeaks=5,
                normalize_peaks=True,
            )
            qa_tensor.create_qa_figure(self.mod_peaks.peak_dirs,
                                       self.mod_peaks.peak_values,
                                       self.qa_tensor_out, self.mod_func)
            self.streamline_generator = ParticleFilteringTracking(
                self.mod_peaks,
                self.tiss_classifier,
                self.seeds,
                self.stream_affine,
                max_cross=maxcrossing,
                step_size=0.5,
                maxlen=1000,
                pft_back_tracking_dist=2,
                pft_front_tracking_dist=1,
                particle_count=15,
                return_all=True,
            )
        elif self.mod_type == "prob":
            maxcrossing = 2
            print("Preparing probabilistic tracking...")
            print("Fitting model to data...")
            self.mod_fit = self.mod.fit(self.data, self.wm_in_dwi_data)
            print("Building direction-getter...")
            self.mod_peaks = peaks_from_model(
                self.mod,
                self.data,
                self.sphere,
                relative_peak_threshold=0.5,
                min_separation_angle=25,
                mask=self.wm_in_dwi_data,
                npeaks=5,
                normalize_peaks=True,
            )
            qa_tensor.create_qa_figure(self.mod_peaks.peak_dirs,
                                       self.mod_peaks.peak_values,
                                       self.qa_tensor_out, self.mod_func)
            try:
                print(
                    "Proceeding using spherical harmonic coefficient from model estimation..."
                )
                self.pdg = ProbabilisticDirectionGetter.from_shcoeff(
                    self.mod_fit.shm_coeff, max_angle=60.0, sphere=self.sphere)
            except Exception:
                print("Proceeding using FOD PMF from model estimation...")
                self.fod = self.mod_fit.odf(self.sphere)
                self.pmf = self.fod.clip(min=0)
                self.pdg = ProbabilisticDirectionGetter.from_pmf(
                    self.pmf, max_angle=60.0, sphere=self.sphere)
            self.streamline_generator = ParticleFilteringTracking(
                self.pdg,
                self.tiss_classifier,
                self.seeds,
                self.stream_affine,
                max_cross=maxcrossing,
                step_size=0.5,
                maxlen=1000,
                pft_back_tracking_dist=2,
                pft_front_tracking_dist=1,
                particle_count=15,
                return_all=True,
            )
        print("Reconstructing tractogram streamlines...")
        self.streamlines = Streamlines(self.streamline_generator)
        return self.streamlines
Example #35
def test_probabilistic_odf_weighted_tracker():
    """This tests that the Probabalistic Direction Getter plays nice
    LocalTracking and produces reasonable streamlines in a simple example.
    """
    sphere = HemiSphere.from_sphere(unit_octahedron)

    # A simple image with three possible configurations, a vertical tract,
    # a horizontal tract and a crossing
    pmf_lookup = np.array([[0., 0., 1.], [1., 0., 0.], [0., 1., 0.],
                           [.6, .4, 0.]])
    simple_image = np.array([
        [0, 1, 0, 0, 0, 0],
        [0, 1, 0, 0, 0, 0],
        [0, 3, 2, 2, 2, 0],
        [0, 1, 0, 0, 0, 0],
        [0, 1, 0, 0, 0, 0],
    ])

    simple_image = simple_image[..., None]
    pmf = pmf_lookup[simple_image]

    seeds = [np.array([1., 1., 0.])] * 30

    mask = (simple_image > 0).astype(float)
    sc = ThresholdStoppingCriterion(mask, .5)

    dg = ProbabilisticDirectionGetter.from_pmf(pmf,
                                               90,
                                               sphere,
                                               pmf_threshold=0.1)
    streamlines = LocalTracking(dg, sc, seeds, np.eye(4), 1.)

    expected = [
        np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.], [2., 2., 0.],
                  [2., 3., 0.], [2., 4., 0.]]),
        np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.], [3., 1., 0.],
                  [4., 1., 0.]])
    ]

    def allclose(x, y):
        return x.shape == y.shape and np.allclose(x, y)

    path = [False, False]
    for sl in streamlines:
        if allclose(sl, expected[0]):
            path[0] = True
        elif allclose(sl, expected[1]):
            path[1] = True
        else:
            raise AssertionError()
    npt.assert_(all(path))

    # The first path is not possible if 90 degree turns are excluded
    dg = ProbabilisticDirectionGetter.from_pmf(pmf,
                                               80,
                                               sphere,
                                               pmf_threshold=0.1)
    streamlines = LocalTracking(dg, sc, seeds, np.eye(4), 1.)

    for sl in streamlines:
        npt.assert_(np.allclose(sl, expected[1]))

    # The first path is not possible if pmf_threshold > 2/3. With pmf values
    # [0.6, 0.4] at the crossing, 0.4 < 0.67 * 0.6, so only the 0.6 direction
    # survives the threshold; multiplying the pmf by a constant does not
    # change this ratio.
    dg = ProbabilisticDirectionGetter.from_pmf(10 * pmf,
                                               90,
                                               sphere,
                                               pmf_threshold=0.67)
    streamlines = LocalTracking(dg, sc, seeds, np.eye(4), 1.)

    for sl in streamlines:
        npt.assert_(np.allclose(sl, expected[1]))

    # Test non WM seed position
    seeds = [[0, 0, 0], [5, 5, 5]]
    streamlines = LocalTracking(dg,
                                sc,
                                seeds,
                                np.eye(4),
                                0.2,
                                max_cross=1,
                                return_all=True)
    streamlines = Streamlines(streamlines)
    npt.assert_(len(streamlines[0]) == 1)  # INVALIDPOINT
    npt.assert_(len(streamlines[1]) == 1)  # OUTSIDEIMAGE

    # Test that all points are within the image volume
    seeds = seeds_from_mask(np.ones(mask.shape), np.eye(4), density=2)
    streamline_generator = LocalTracking(dg,
                                         sc,
                                         seeds,
                                         np.eye(4),
                                         0.5,
                                         return_all=True)
    streamlines = Streamlines(streamline_generator)
    for s in streamlines:
        npt.assert_(np.all((s + 0.5).astype(int) >= 0))
        npt.assert_(np.all((s + 0.5).astype(int) < mask.shape))
    # Test that the number of streamlines returned with return_all=True equals
    # the number of seeds placed

    npt.assert_(np.array([len(streamlines) == len(seeds)]))

    # Test reproducibility
    tracking_1 = Streamlines(
        LocalTracking(dg, sc, seeds, np.eye(4), 0.5, random_seed=0))._data
    tracking_2 = Streamlines(
        LocalTracking(dg, sc, seeds, np.eye(4), 0.5, random_seed=0))._data
    npt.assert_equal(tracking_1, tracking_2)
Example #36
            if seeds[i][0]>199.:
                seeds[i][0]=398-seeds[i][0]
            if seeds[i][1]>399.:
                seeds[i][1]=798-seeds[i][1]
            if seeds[i][2]>199.:
                seeds[i][2]=398-seeds[i][2]
            for j in range(3):
                if seeds[i][j]<0.:
                    seeds[i][j]=-seeds[i][j]
    et3 = time.time() - st3
    print('seeding transformation finished, the total seeds are {}, '
          'running time is {}'.format(seeds.shape[0], et3))

    print('generating streamlines begins')
    st4 = time.time()
    fod_coeff = csd_peaks.shm_coeff
    prob_dg = ProbabilisticDirectionGetter.from_shcoeff(fod_coeff,
                                                        max_angle=70.,
                                                        relative_peak_threshold=0.1,
                                                        sphere=default_sphere)

    del data, img, labels, labels_img, csd_peaks, csd_model
    gc.collect()
    print('data, img, labels, labels_img, csd_peaks, csd_model deleted '
          'to save memory')

    classifier = BinaryTissueClassifier(mask)
    streamline_generator = LocalTracking(prob_dg, classifier, seeds, affine, step_size=.5)
    affine = streamline_generator.affine

    streamlines = Streamlines(streamline_generator)
    et4 = time.time() - st4
    # lengths = [length(sl).astype(np.int) for sl in streamlines]
    # print('generating streamlines finished, the length is {}~{}, running '
    #       'time is {}'.format(np.min(lengths), np.max(lengths), et4))

    del bm, mask, fod_coeff, prob_dg, classifier #, lengths
Example #37
def test_particle_filtering_tractography():
    """This tests that the ParticleFilteringTracking produces
    more streamlines connecting the gray matter than LocalTracking.
    """
    sphere = get_sphere('repulsion100')
    step_size = 0.2

    # Simple tissue masks
    simple_wm = np.array([[0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0],
                          [0, 1, 1, 1, 0, 0], [0, 1, 1, 1, 0, 0],
                          [0, 0, 0, 0, 0, 0]])
    simple_wm = np.dstack([
        np.zeros(simple_wm.shape), simple_wm, simple_wm, simple_wm,
        np.zeros(simple_wm.shape)
    ])
    simple_gm = np.array([[1, 1, 0, 0, 0, 0], [1, 1, 0, 0, 0, 0],
                          [0, 1, 0, 0, 1, 0], [0, 0, 0, 0, 1, 0],
                          [0, 0, 0, 0, 0, 0]])
    simple_gm = np.dstack([
        np.zeros(simple_gm.shape), simple_gm, simple_gm, simple_gm,
        np.zeros(simple_gm.shape)
    ])
    simple_csf = np.ones(simple_wm.shape) - simple_wm - simple_gm

    sc = ActStoppingCriterion.from_pve(simple_wm, simple_gm, simple_csf)
    seeds = seeds_from_mask(simple_wm, np.eye(4), density=2)

    # Random pmf in every voxel
    shape_img = list(simple_wm.shape)
    shape_img.extend([sphere.vertices.shape[0]])
    np.random.seed(0)  # Random number generator initialization
    pmf = np.random.random(shape_img)

    # Test that PFT recovers at least as many streamlines as LocalTracking
    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 60, sphere)
    local_streamlines_generator = LocalTracking(dg,
                                                sc,
                                                seeds,
                                                np.eye(4),
                                                step_size,
                                                max_cross=1,
                                                return_all=False)
    local_streamlines = Streamlines(local_streamlines_generator)

    pft_streamlines_generator = ParticleFilteringTracking(
        dg,
        sc,
        seeds,
        np.eye(4),
        step_size,
        max_cross=1,
        return_all=False,
        pft_back_tracking_dist=1,
        pft_front_tracking_dist=0.5)
    pft_streamlines = Streamlines(pft_streamlines_generator)

    npt.assert_(np.array([len(pft_streamlines) > 0]))
    npt.assert_(np.array([len(pft_streamlines) >= len(local_streamlines)]))

    # Test that all points are equally spaced
    for l in [1, 2, 5, 10, 100]:
        pft_streamlines = ParticleFilteringTracking(dg,
                                                    sc,
                                                    seeds,
                                                    np.eye(4),
                                                    step_size,
                                                    max_cross=1,
                                                    return_all=True,
                                                    maxlen=l)
        for s in pft_streamlines:
            for i in range(len(s) - 1):
                npt.assert_almost_equal(np.linalg.norm(s[i] - s[i + 1]),
                                        step_size)
    # Test that all points are within the image volume
    seeds = seeds_from_mask(np.ones(simple_wm.shape), np.eye(4), density=1)
    pft_streamlines_generator = ParticleFilteringTracking(dg,
                                                          sc,
                                                          seeds,
                                                          np.eye(4),
                                                          step_size,
                                                          max_cross=1,
                                                          return_all=True)
    pft_streamlines = Streamlines(pft_streamlines_generator)

    for s in pft_streamlines:
        npt.assert_(np.all((s + 0.5).astype(int) >= 0))
        npt.assert_(np.all((s + 0.5).astype(int) < simple_wm.shape))

    # Test that the number of streamlines returned with return_all=True equals
    # the number of seeds placed
    npt.assert_(np.array([len(pft_streamlines) == len(seeds)]))

    # Test non WM seed position
    seeds = [[0, 5, 4], [0, 0, 1], [50, 50, 50]]
    pft_streamlines_generator = ParticleFilteringTracking(dg,
                                                          sc,
                                                          seeds,
                                                          np.eye(4),
                                                          step_size,
                                                          max_cross=1,
                                                          return_all=True)
    pft_streamlines = Streamlines(pft_streamlines_generator)

    npt.assert_equal(len(pft_streamlines[0]), 3)  # INVALIDPOINT
    npt.assert_equal(len(pft_streamlines[1]), 3)  # ENDPOINT
    npt.assert_equal(len(pft_streamlines[2]), 1)  # OUTSIDEIMAGE

    # Test with wrong StoppingCriterion type
    sc_bin = BinaryStoppingCriterion(simple_wm)
    npt.assert_raises(
        ValueError, lambda: ParticleFilteringTracking(dg, sc_bin, seeds,
                                                      np.eye(4), step_size))
    # Test with invalid back/front tracking distances
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg,
                                          sc,
                                          seeds,
                                          np.eye(4),
                                          step_size,
                                          pft_back_tracking_dist=0,
                                          pft_front_tracking_dist=0))
    npt.assert_raises(
        ValueError, lambda: ParticleFilteringTracking(
            dg, sc, seeds, np.eye(4), step_size, pft_back_tracking_dist=-1))
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg,
                                          sc,
                                          seeds,
                                          np.eye(4),
                                          step_size,
                                          pft_back_tracking_dist=0,
                                          pft_front_tracking_dist=-2))

    # Test with invalid affine shape
    npt.assert_raises(
        ValueError,
        lambda: ParticleFilteringTracking(dg, sc, seeds, np.eye(3), step_size))

    # Test with invalid maxlen
    npt.assert_raises(
        ValueError, lambda: ParticleFilteringTracking(
            dg, sc, seeds, np.eye(4), step_size, maxlen=0))
    npt.assert_raises(
        ValueError, lambda: ParticleFilteringTracking(
            dg, sc, seeds, np.eye(4), step_size, maxlen=-1))

    # Test with invalid particle count
    npt.assert_raises(
        ValueError, lambda: ParticleFilteringTracking(
            dg, sc, seeds, np.eye(4), step_size, particle_count=0))
    npt.assert_raises(
        ValueError, lambda: ParticleFilteringTracking(
            dg, sc, seeds, np.eye(4), step_size, particle_count=-1))

    # Test reproducibility
    tracking1 = Streamlines(
        ParticleFilteringTracking(dg,
                                  sc,
                                  seeds,
                                  np.eye(4),
                                  step_size,
                                  random_seed=0))._data
    tracking2 = Streamlines(
        ParticleFilteringTracking(dg,
                                  sc,
                                  seeds,
                                  np.eye(4),
                                  step_size,
                                  random_seed=0))._data
    npt.assert_equal(tracking1, tracking2)
Example #38
def track_ensemble(dwi_data,
                   target_samples,
                   atlas_data_wm_gm_int,
                   parcels,
                   mod_fit,
                   tiss_classifier,
                   sphere,
                   directget,
                   curv_thr_list,
                   step_list,
                   track_type,
                   maxcrossing,
                   max_length,
                   roi_neighborhood_tol,
                   min_length,
                   waymask,
                   n_seeds_per_iter=100,
                   pft_back_tracking_dist=2,
                   pft_front_tracking_dist=1,
                   particle_count=15):
    """
    Perform native-space ensemble tractography, restricted to a vector of ROI
    masks.

    Parameters
    ----------
    dwi_data : array
        4D array of dwi data.
    target_samples : int
        Total number of streamline samples specified to generate streams.
    atlas_data_wm_gm_int : array
        3D int32 numpy array of atlas parcellation intensities from Nifti1Image in T1w-warped native diffusion space,
        restricted to wm-gm interface.
    parcels : list
        List of 3D boolean numpy arrays of atlas parcellation ROI masks from a Nifti1Image in T1w-warped native
        diffusion space.
    mod_fit : obj
        Connectivity reconstruction model fit.
    tiss_classifier : obj
        Tissue classifier object used as the tracking stopping criterion.
    sphere : obj
        DiPy object for modeling diffusion directions on a sphere.
    directget : str
        The statistical approach to tracking. Options are: det (deterministic), closest (clos), boot (bootstrapped),
        and prob (probabilistic).
    curv_thr_list : list
        List of integer curvature thresholds used to perform ensemble tracking.
    step_list : list
        List of float step-sizes used to perform ensemble tracking.
    track_type : str
        Tracking algorithm used (e.g. 'local' or 'particle').
    maxcrossing : int
        Maximum number of diffusion directions that can be assumed per voxel
        while tracking.
    max_length : int
        Maximum fiber length threshold in mm to restrict tracking.
    roi_neighborhood_tol : float
        Distance (in the units of the streamlines, usually mm). If any
        coordinate in the streamline is within this distance from the center
        of any voxel in the ROI, the filtering criterion is set to True for
        this streamline, otherwise False. Defaults to the distance between
        the center of each voxel and the corner of the voxel.
    min_length : int
        Minimum fiber length threshold in mm.
    waymask : str
        Path to a Nifti1Image in native diffusion space to constrain tractography.
    n_seeds_per_iter : int
        Number of seeds from which to initiate tracking for each unique
        ensemble combination. By default this is set to 100.
    pft_back_tracking_dist : float
        Distance in mm to back track before starting the particle filtering
        tractography. The total particle filtering tractography distance is
        equal to back_tracking_dist + front_tracking_dist. By default this is set to 2 mm.
    pft_front_tracking_dist : float
        Distance in mm to run the particle filtering tractography after the
        back track distance. The total particle filtering tractography
        distance is equal to back_tracking_dist + front_tracking_dist. By
        default this is set to 1 mm.
    particle_count : int
        Number of particles to use in the particle filter.

    Returns
    -------
    streamlines : ArraySequence
        DiPy list/array-like object of streamline points from tractography.
    """
    from colorama import Fore, Style
    from dipy.tracking import utils
    from dipy.tracking.streamline import Streamlines, select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, ParticleFilteringTracking
    from dipy.direction import ProbabilisticDirectionGetter, BootDirectionGetter, ClosestPeakDirectionGetter, DeterministicMaximumDirectionGetter

    if waymask:
        waymask_data = nib.load(waymask).get_fdata().astype('bool')

    # Commence Ensemble Tractography
    parcel_vec = list(np.ones(len(parcels)).astype('bool'))
    streamlines = nib.streamlines.array_sequence.ArraySequence()
    ix = 0
    circuit_ix = 0
    stream_counter = 0
    while int(stream_counter) < int(target_samples):
        for curv_thr in curv_thr_list:
            print("%s%s" % ('Curvature: ', curv_thr))

            # Instantiate DirectionGetter
            if directget == 'prob':
                dg = ProbabilisticDirectionGetter.from_shcoeff(
                    mod_fit, max_angle=float(curv_thr), sphere=sphere)
            elif directget == 'boot':
                dg = BootDirectionGetter.from_data(dwi_data,
                                                   mod_fit,
                                                   max_angle=float(curv_thr),
                                                   sphere=sphere)
            elif directget == 'clos':
                dg = ClosestPeakDirectionGetter.from_shcoeff(
                    mod_fit, max_angle=float(curv_thr), sphere=sphere)
            elif directget == 'det':
                dg = DeterministicMaximumDirectionGetter.from_shcoeff(
                    mod_fit, max_angle=float(curv_thr), sphere=sphere)
            else:
                raise ValueError(
                    'ERROR: No valid direction getter(s) specified.')

            for step in step_list:
                print("%s%s" % ('Step: ', step))

                # Perform wm-gm interface seeding, using n_seeds at a time
                seeds = utils.random_seeds_from_mask(
                    atlas_data_wm_gm_int > 0,
                    seeds_count=n_seeds_per_iter,
                    seed_count_per_voxel=False,
                    affine=np.eye(4))
                if len(seeds) == 0:
                    raise RuntimeWarning(
                        'Warning: No valid seed points found in wm-gm interface...'
                    )

                print(seeds)

                # Perform tracking
                if track_type == 'local':
                    streamline_generator = LocalTracking(
                        dg,
                        tiss_classifier,
                        seeds,
                        np.eye(4),
                        max_cross=int(maxcrossing),
                        maxlen=int(max_length),
                        step_size=float(step),
                        return_all=True)
                elif track_type == 'particle':
                    streamline_generator = ParticleFilteringTracking(
                        dg,
                        tiss_classifier,
                        seeds,
                        np.eye(4),
                        max_cross=int(maxcrossing),
                        step_size=float(step),
                        maxlen=int(max_length),
                        pft_back_tracking_dist=pft_back_tracking_dist,
                        pft_front_tracking_dist=pft_front_tracking_dist,
                        particle_count=particle_count,
                        return_all=True)
                else:
                    raise ValueError(
                        'ERROR: No valid tracking method(s) specified.')

                # Filter resulting streamlines by roi-intersection characteristics
                roi_proximal_streamlines = Streamlines(
                    select_by_rois(streamline_generator,
                                   affine=np.eye(4),
                                   rois=parcels,
                                   include=parcel_vec,
                                   mode='any',
                                   tol=roi_neighborhood_tol))

                print("%s%s" %
                      ('Qualifying Streamlines by node intersection: ',
                       len(roi_proximal_streamlines)))

                roi_proximal_streamlines = nib.streamlines.array_sequence.ArraySequence(
                    [
                        s for s in roi_proximal_streamlines
                        if len(s) > float(min_length)
                    ])

                print("%s%s" %
                      ('Qualifying Streamlines by minimum length criterion: ',
                       len(roi_proximal_streamlines)))

                if waymask:
                    roi_proximal_streamlines = roi_proximal_streamlines[
                        utils.near_roi(roi_proximal_streamlines,
                                       np.eye(4),
                                       waymask_data,
                                       tol=roi_neighborhood_tol,
                                       mode='any')]
                    print("%s%s" %
                          ('Qualifying Streamlines by waymask proximity: ',
                           len(roi_proximal_streamlines)))

                # Repeat process until target samples condition is met
                ix = ix + 1
                for s in roi_proximal_streamlines:
                    stream_counter = stream_counter + len(s)
                    streamlines.append(s)
                    if int(stream_counter) >= int(target_samples):
                        break
                    else:
                        continue

                # Cleanup memory
                del seeds, roi_proximal_streamlines, streamline_generator

            del dg

        circuit_ix = circuit_ix + 1
        print(
            "%s%s%s%s%s" %
            ('Completed hyperparameter circuit: ', circuit_ix,
             '...\nCumulative Streamline Count: ', Fore.CYAN, stream_counter))
        print(Style.RESET_ALL)

    print('\n')

    return streamlines
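
# --- Illustration (not part of the original snippet) -------------------------
# The ensemble loop above revisits every (curvature, step-size) pair once per
# "circuit" and keeps looping until ``target_samples`` streamline points have
# been accumulated. A tiny, self-contained sketch of that visiting order, using
# hypothetical hyperparameter values:
curv_thr_list = [60, 30]  # hypothetical curvature thresholds (degrees)
step_list = [0.2, 0.5]    # hypothetical step sizes (mm)
for circuit_ix in range(1, 3):
    for curv_thr in curv_thr_list:
        for step in step_list:
            print('circuit {}: max_angle={}, step_size={}'.format(
                circuit_ix, curv_thr, step))
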
response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7)
csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=6)
csd_fit = csd_model.fit(data, mask=white_matter)

"""
Next we'll need to make a ``ProbabilisticDirectionGetter``. Because the CSD
model represents the FOD using the spherical harmonic basis, we can use the
``from_shcoeff`` method to create the direction getter. This direction getter
will randomly sample directions from the FOD each time the tracking algorithm
needs to take another step.
"""

from dipy.direction import ProbabilisticDirectionGetter

prob_dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit.shm_coeff,
                                                    max_angle=30.,
                                                    sphere=default_sphere)

"""
As with deterministic tracking, we'll need to use a tissue classifier to
restrict the tracking to the white matter of the brain. One might be tempted
to use the GFA of the CSD FODs to build a tissue classifier; however, the GFA
values of these FODs don't separate gray matter from white matter well. We will
therefore use the GFA from the CSA model, which we fit in the first section of
this example. Alternatively, one could fit a ``TensorModel`` to the data and use
the fractional anisotropy (FA) to build a tissue classifier, as sketched below.
"""

classifier = ThresholdTissueClassifier(csa_peaks.gfa, .25)
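
"""
As a sketch of the FA-based alternative mentioned above (not part of the
original example), a ``TensorModel`` can be fit to the same data and its
fractional anisotropy thresholded instead of the CSA GFA. The 0.2 threshold
below is an assumption chosen for illustration.
"""

from dipy.reconst.dti import TensorModel, fractional_anisotropy

# ``gtab``, ``data`` and ``white_matter`` are the variables already defined in
# this example.
tensor_model = TensorModel(gtab)
tensor_fit = tensor_model.fit(data, mask=white_matter)
fa = fractional_anisotropy(tensor_fit.evals)
fa_classifier = ThresholdTissueClassifier(fa, .2)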

"""
Example #40
def run_tracking(step_curv_combinations,
                 recon_path,
                 n_seeds_per_iter,
                 directget,
                 maxcrossing,
                 max_length,
                 pft_back_tracking_dist,
                 pft_front_tracking_dist,
                 particle_count,
                 roi_neighborhood_tol,
                 waymask,
                 min_length,
                 track_type,
                 min_separation_angle,
                 sphere,
                 tiss_class,
                 tissues4d,
                 cache_dir,
                 min_seeds=100):

    import gc
    import os
    import h5py
    from dipy.tracking import utils
    from dipy.tracking.streamline import select_by_rois
    from dipy.tracking.local_tracking import LocalTracking, \
        ParticleFilteringTracking
    from dipy.direction import (ProbabilisticDirectionGetter,
                                ClosestPeakDirectionGetter,
                                DeterministicMaximumDirectionGetter)
    from nilearn.image import index_img
    from pynets.dmri.track import prep_tissues
    from nibabel.streamlines.array_sequence import ArraySequence
    from nipype.utils.filemanip import copyfile, fname_presuffix
    import uuid
    from time import strftime

    run_uuid = f"{strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4()}"

    recon_path_tmp_path = fname_presuffix(
        recon_path,
        suffix=f"_{'_'.join([str(i) for i in step_curv_combinations])}_"
        f"{run_uuid}",
        newpath=cache_dir)
    copyfile(recon_path, recon_path_tmp_path, copy=True, use_hardlink=False)

    tissues4d_tmp_path = fname_presuffix(
        tissues4d,
        suffix=f"_{'_'.join([str(i) for i in step_curv_combinations])}_"
        f"{run_uuid}",
        newpath=cache_dir)
    copyfile(tissues4d, tissues4d_tmp_path, copy=True, use_hardlink=False)

    if waymask is not None:
        waymask_tmp_path = fname_presuffix(
            waymask,
            suffix=f"_{'_'.join([str(i) for i in step_curv_combinations])}_"
            f"{run_uuid}",
            newpath=cache_dir)
        copyfile(waymask, waymask_tmp_path, copy=True, use_hardlink=False)
    else:
        waymask_tmp_path = None

    tissue_img = nib.load(tissues4d_tmp_path)

    # Order:
    B0_mask = index_img(tissue_img, 0)
    atlas_img = index_img(tissue_img, 1)
    seeding_mask = index_img(tissue_img, 2)
    t1w2dwi = index_img(tissue_img, 3)
    gm_in_dwi = index_img(tissue_img, 4)
    vent_csf_in_dwi = index_img(tissue_img, 5)
    wm_in_dwi = index_img(tissue_img, 6)

    tiss_classifier = prep_tissues(t1w2dwi, gm_in_dwi, vent_csf_in_dwi,
                                   wm_in_dwi, tiss_class, B0_mask)

    B0_mask_data = np.asarray(B0_mask.dataobj).astype("bool")

    seeding_mask = np.asarray(
        seeding_mask.dataobj).astype("bool").astype("int16")

    with h5py.File(recon_path_tmp_path, 'r+') as hf:
        mod_fit = hf['reconstruction'][:].astype('float32')

    print("%s%s" % ("Curvature: ", step_curv_combinations[1]))

    # Instantiate DirectionGetter
    if directget.lower() in ["probabilistic", "prob"]:
        dg = ProbabilisticDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif directget.lower() in ["closestpeaks", "cp"]:
        dg = ClosestPeakDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    elif directget.lower() in ["deterministic", "det"]:
        maxcrossing = 1
        dg = DeterministicMaximumDirectionGetter.from_shcoeff(
            mod_fit,
            max_angle=float(step_curv_combinations[1]),
            sphere=sphere,
            min_separation_angle=min_separation_angle,
        )
    else:
        raise ValueError("ERROR: No valid direction getter(s) specified.")

    print("%s%s" % ("Step: ", step_curv_combinations[0]))

    # Perform wm-gm interface seeding, using n_seeds at a time
    seeds = utils.random_seeds_from_mask(
        seeding_mask > 0,
        seeds_count=n_seeds_per_iter,
        seed_count_per_voxel=False,
        affine=np.eye(4),
    )
    if len(seeds) < min_seeds:
        print(
            UserWarning(
                f"<{min_seeds} valid seed points found in wm-gm interface..."))
        return None

    # print(seeds)

    # Perform tracking
    if track_type == "local":
        streamline_generator = LocalTracking(dg,
                                             tiss_classifier,
                                             seeds,
                                             np.eye(4),
                                             max_cross=int(maxcrossing),
                                             maxlen=int(max_length),
                                             step_size=float(
                                                 step_curv_combinations[0]),
                                             fixedstep=False,
                                             return_all=True,
                                             random_seed=42)
    elif track_type == "particle":
        streamline_generator = ParticleFilteringTracking(
            dg,
            tiss_classifier,
            seeds,
            np.eye(4),
            max_cross=int(maxcrossing),
            step_size=float(step_curv_combinations[0]),
            maxlen=int(max_length),
            pft_back_tracking_dist=pft_back_tracking_dist,
            pft_front_tracking_dist=pft_front_tracking_dist,
            pft_max_trial=20,
            particle_count=particle_count,
            return_all=True,
            random_seed=42)
    else:
        raise ValueError("ERROR: No valid tracking method(s) specified.")

    # Filter resulting streamlines by those that stay entirely
    # inside the brain
    try:
        roi_proximal_streamlines = utils.target(streamline_generator,
                                                np.eye(4),
                                                B0_mask_data.astype('bool'),
                                                include=True)
    except BaseException:
        print('No streamlines found inside the brain! ' 'Check registrations.')
        return None

    del mod_fit, seeds, tiss_classifier, streamline_generator, \
        B0_mask_data, seeding_mask, dg

    B0_mask.uncache()
    atlas_img.uncache()
    t1w2dwi.uncache()
    gm_in_dwi.uncache()
    vent_csf_in_dwi.uncache()
    wm_in_dwi.uncache()
    tissue_img.uncache()
    gc.collect()

    # Filter resulting streamlines by roi-intersection
    # characteristics
    atlas_data = np.array(atlas_img.dataobj).astype("uint16")

    # Build a list of boolean ROI masks from the atlas for later roi filtering
    intensities = [i for i in np.unique(atlas_data) if i != 0]
    parcels = [atlas_data == roi_val for roi_val in intensities]

    parcel_vec = list(np.ones(len(parcels)).astype("bool"))

    try:
        roi_proximal_streamlines = \
            nib.streamlines.array_sequence.ArraySequence(
                select_by_rois(
                    roi_proximal_streamlines,
                    affine=np.eye(4),
                    rois=parcels,
                    include=parcel_vec,
                    mode="any",
                    tol=roi_neighborhood_tol,
                )
            )
        print("%s%s" % ("Filtering by: \nNode intersection: ",
                        len(roi_proximal_streamlines)))
    except BaseException:
        print('No streamlines found to connect any parcels! '
              'Check registrations.')
        return None

    try:
        roi_proximal_streamlines = nib.streamlines. \
            array_sequence.ArraySequence(
                [
                    s for s in roi_proximal_streamlines
                    if len(s) >= float(min_length)
                ]
            )
        print(f"Minimum fiber length >{min_length}mm: "
              f"{len(roi_proximal_streamlines)}")
    except BaseException:
        print('No streamlines remaining after minimal length criterion.')
        return None

    if waymask is not None and os.path.isfile(waymask_tmp_path):
        waymask_data = np.asarray(
            nib.load(waymask_tmp_path).dataobj).astype("bool")
        try:
            roi_proximal_streamlines = roi_proximal_streamlines[utils.near_roi(
                roi_proximal_streamlines,
                np.eye(4),
                waymask_data,
                tol=int(round(roi_neighborhood_tol * 0.50, 1)),
                mode="all")]
            print("%s%s" %
                  ("Waymask proximity: ", len(roi_proximal_streamlines)))
            del waymask_data
        except BaseException:
            print('No streamlines remaining in waymask\'s vicinity.')
            return None

    hf.close()
    del parcels, atlas_data

    tmp_files = [tissues4d_tmp_path, waymask_tmp_path, recon_path_tmp_path]
    for j in tmp_files:
        if j is not None:
            if os.path.isfile(j):
                os.system(f"rm -f {j} &")

    if len(roi_proximal_streamlines) > 0:
        return ArraySequence(
            [s.astype("float32") for s in roi_proximal_streamlines])
    else:
        return None
Example #41
renderer = window.Renderer()

img_pve_csf, img_pve_gm, img_pve_wm = read_stanford_pve_maps()
hardi_img, gtab, labels_img = read_stanford_labels()

data = hardi_img.get_data()
labels = labels_img.get_data()
affine = hardi_img.affine
shape = labels.shape

response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7)
csd_model = ConstrainedSphericalDeconvModel(gtab, response)
csd_fit = csd_model.fit(data, mask=img_pve_wm.get_data())

dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit.shm_coeff,
                                               max_angle=20.,
                                               sphere=default_sphere)
"""
CMC/ACT Tissue Classifiers
--------------------------
Continuous map criterion (CMC) [Girard2014]_ and anatomically-constrained
tractography (ACT) [Smith2012]_ both use partial volume estimation (PVE)
information from anatomical images to determine when the tractography stops.
Both tissue classifiers use trilinear interpolation at the tracking position.
The CMC tissue classifier uses a probability derived from the PVE maps to
determine if the streamline reaches a 'valid' or 'invalid' region. ACT uses a
fixed threshold on the PVE maps. Both tissue classifiers can be used in
conjunction with PFT. In this example, we use CMC.
"""

from dipy.tracking.local import CmcTissueClassifier
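
"""
A minimal sketch of how the CMC classifier is typically constructed from the
PVE maps loaded above; this continues beyond what the original snippet shows,
and the voxel-size estimate and 0.2 mm step size are assumptions made for
illustration.
"""

import numpy as np

voxel_size = np.average(img_pve_wm.header['pixdim'][1:4])
step_size = 0.2

cmc_classifier = CmcTissueClassifier.from_pve(img_pve_wm.get_data(),
                                              img_pve_gm.get_data(),
                                              img_pve_csf.get_data(),
                                              step_size=step_size,
                                              average_voxel_size=voxel_size)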
Example #42
def test_stop_conditions():
    """This tests that the Local Tracker behaves as expected for the
    following tissue types.
    """
    # TissueTypes.TRACKPOINT = 1
    # TissueTypes.ENDPOINT = 2
    # TissueTypes.INVALIDPOINT = 0
    tissue = np.array([[2, 1, 1, 2, 1],
                       [2, 2, 1, 1, 2],
                       [1, 1, 1, 1, 1],
                       [1, 1, 1, 2, 2],
                       [0, 1, 1, 1, 2],
                       [0, 1, 1, 0, 2],
                       [1, 0, 1, 1, 1]])
    tissue = tissue[None]

    sphere = HemiSphere.from_sphere(unit_octahedron)
    pmf_lookup = np.array([[0., 0., 0., ],
                           [0., 0., 1.]])
    pmf = pmf_lookup[(tissue > 0).astype("int")]

    # Create one seed per row of the tissue array
    x = np.array([0., 0, 0, 0, 0, 0, 0])
    y = np.array([0., 1, 2, 3, 4, 5, 6])
    z = np.array([1., 1, 1, 0, 1, 1, 1])
    seeds = np.column_stack([x, y, z])

    # Set up tracking
    endpoint_mask = tissue == TissueTypes.ENDPOINT
    invalidpoint_mask = tissue == TissueTypes.INVALIDPOINT
    tc = ActTissueClassifier(endpoint_mask, invalidpoint_mask)
    dg = ProbabilisticDirectionGetter.from_pmf(pmf, 60, sphere)

    # valid streamlines only
    streamlines_generator = LocalTracking(direction_getter=dg,
                                          tissue_classifier=tc,
                                          seeds=seeds,
                                          affine=np.eye(4),
                                          step_size=1.,
                                          return_all=False)
    streamlines_not_all = iter(streamlines_generator)

    # all streamlines
    streamlines_all_generator = LocalTracking(direction_getter=dg,
                                              tissue_classifier=tc,
                                              seeds=seeds,
                                              affine=np.eye(4),
                                              step_size=1.,
                                              return_all=True)
    streamlines_all = iter(streamlines_all_generator)

    # Check that the first streamline stops at 0 and 3 (ENDPOINT)
    y = 0
    sl = next(streamlines_not_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 3])
    npt.assert_equal(len(sl), 4)

    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 3])
    npt.assert_equal(len(sl), 4)

    # Check that the first streamline stops at 0 and 4 (ENDPOINT)
    y = 1
    sl = next(streamlines_not_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 4])
    npt.assert_equal(len(sl), 5)

    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 4])
    npt.assert_equal(len(sl), 5)

    # This streamline should be the same as above. This row does not have
    # ENDPOINTs, but the streamline should stop at the edge and not include
    # OUTSIDEIMAGE points.
    y = 2
    sl = next(streamlines_not_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 4])
    npt.assert_equal(len(sl), 5)

    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 4])
    npt.assert_equal(len(sl), 5)

    # If we seed on the edge, the first (or last) point in the streamline
    # should be the seed.
    y = 3
    sl = next(streamlines_not_all)
    npt.assert_equal(sl[0], seeds[y])

    sl = next(streamlines_all)
    npt.assert_equal(sl[0], seeds[y])

    # The last 3 seeds should not produce streamlines,
    # INVALIDPOINT streamlines are rejected (return_all=False).
    npt.assert_equal(len(list(streamlines_not_all)), 0)

    # The last 3 seeds should produce invalid streamlines,
    # INVALIDPOINT streamlines are kept (return_all=True).
    # The streamline stops at 0 (INVALIDPOINT) and 4 (ENDPOINT)
    y = 4
    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 4])
    npt.assert_equal(len(sl), 5)

    # The streamline stops at 0 (INVALIDPOINT) and 3 (INVALIDPOINT)
    y = 5
    sl = next(streamlines_all)
    npt.assert_equal(sl[0], [0, y, 0])
    npt.assert_equal(sl[-1], [0, y, 3])
    npt.assert_equal(len(sl), 4)

    # The last streamline should contain only one point, the seed point,
    # because no valid initial direction was returned.
    y = 6
    sl = next(streamlines_all)
    npt.assert_equal(sl[0], seeds[y])
    npt.assert_equal(sl[-1], seeds[y])
    npt.assert_equal(len(sl), 1)