Example #1
def test_rb_slr_threads():

    rng_multi = np.random.RandomState(42)
    rb_multi = RecoBundles(f, greater_than=0, clust_thr=10,
                           rng=rng_multi)
    rec_trans_multi_threads, _ = rb_multi.recognize(model_bundle=f2,
                                                    model_clust_thr=5.,
                                                    reduction_thr=10,
                                                    slr=True,
                                                    num_threads=None)

    rb_single = RecoBundles(f, greater_than=0, clust_thr=10,
                            rng=np.random.RandomState(42))
    rec_trans_single_thread, _ = rb_single.recognize(model_bundle=f2,
                                                     model_clust_thr=5.,
                                                     reduction_thr=10,
                                                     slr=True,
                                                     num_threads=1)

    D = bundles_distances_mam(rec_trans_multi_threads, rec_trans_single_thread)

    # check if the bundle is recognized correctly
    # multi-threading prevents an exact match
    for row in D:
        assert_almost_equal(row.min(), 0, decimal=4)
Example #2
def test_rb_slr_threads():

    rng_multi = np.random.RandomState(42)
    rb_multi = RecoBundles(f, greater_than=0, clust_thr=10,
                           rng=rng_multi)
    rec_trans_multi_threads, _ = rb_multi.recognize(model_bundle=f2,
                                                    model_clust_thr=5.,
                                                    reduction_thr=10,
                                                    slr=True,
                                                    slr_num_threads=None)

    rb_single = RecoBundles(f, greater_than=0, clust_thr=10,
                            rng=np.random.RandomState(42))
    rec_trans_single_thread, _ = rb_single.recognize(model_bundle=f2,
                                                     model_clust_thr=5.,
                                                     reduction_thr=10,
                                                     slr=True,
                                                     slr_num_threads=1)

    D = bundles_distances_mam(rec_trans_multi_threads, rec_trans_single_thread)

    # check if the bundle is recognized correctly
    # multi-threading prevents an exact match
    for row in D:
        assert_almost_equal(row.min(), 0, decimal=4)
Example #3
def test_rb_disable_slr():

    rb = RecoBundles(f, greater_than=0, clust_thr=10)

    rec_trans, rec_labels = rb.recognize(model_bundle=f2,
                                         model_clust_thr=5.,
                                         reduction_thr=10,
                                         slr=False)

    D = bundles_distances_mam(f2, f[rec_labels])

    # check if the bundle is recognized correctly
    if len(f2) == len(rec_labels):
        for row in D:
            assert_equal(row.min(), 0)

    refine_trans, refine_labels = rb.refine(model_bundle=f2,
                                            pruned_streamlines=rec_trans,
                                            model_clust_thr=5.,
                                            reduction_thr=10)

    D = bundles_distances_mam(f2, f[refine_labels])

    # check if the bundle is recognized correctly
    for row in D:
        assert_equal(row.min(), 0)
Example #4
def test_rb_reduction_mam():

    rb = RecoBundles(f, greater_than=0, clust_thr=10, verbose=True)

    rec_trans, rec_labels = rb.recognize(model_bundle=f2,
                                         model_clust_thr=5.,
                                         reduction_thr=10,
                                         reduction_distance='mam',
                                         slr=True,
                                         slr_metric='asymmetric',
                                         pruning_distance='mam')

    D = bundles_distances_mam(f2, f[rec_labels])

    # check if the bundle is recognized correctly
    if len(f2) == len(rec_labels):
        for row in D:
            assert_equal(row.min(), 0)

    refine_trans, refine_labels = rb.refine(model_bundle=f2,
                                            pruned_streamlines=rec_trans,
                                            model_clust_thr=5.,
                                            reduction_thr=10)

    D = bundles_distances_mam(f2, f[refine_labels])

    # check if the bundle is recognized correctly
    for row in D:
        assert_equal(row.min(), 0)
Example #5
def test_rb_no_neighb():
    # what if no neighbors are found? No recognition

    b = Streamlines(fornix)
    b1 = b.copy()

    b2 = b1[:20].copy()
    b2._data += np.array([100, 0, 0])

    b3 = b1[:20].copy()
    b3._data += np.array([300, 0, 0])

    b.extend(b3)

    rb = RecoBundles(b, greater_than=0, clust_thr=10)

    rec_trans, rec_labels = rb.recognize(model_bundle=b2,
                                         model_clust_thr=5.,
                                         reduction_thr=10)

    if len(rec_trans) > 0:
        refine_trans, refine_labels = rb.refine(model_bundle=b2,
                                                pruned_streamlines=rec_trans,
                                                model_clust_thr=5.,
                                                reduction_thr=10)

        assert_equal(len(refine_labels), 0)
        assert_equal(len(refine_trans), 0)

    else:
        assert_equal(len(rec_labels), 0)
        assert_equal(len(rec_trans), 0)
Example #6
def test_rb_clustermap():

    cluster_map = qbx_and_merge(f, thresholds=[40, 25, 20, 10])

    rb = RecoBundles(f, greater_than=0, less_than=1000000,
                     cluster_map=cluster_map, clust_thr=10)
    rec_trans, rec_labels = rb.recognize(model_bundle=f2,
                                         model_clust_thr=5.,
                                         reduction_thr=10)

    D = bundles_distances_mam(f2, f[rec_labels])

    # check if the bundle is recognized correctly
    if len(f2) == len(rec_labels):
        for row in D:
            assert_equal(row.min(), 0)

    refine_trans, refine_labels = rb.refine(model_bundle=f2,
                                            pruned_streamlines=rec_trans,
                                            model_clust_thr=5.,
                                            reduction_thr=10)

    D = bundles_distances_mam(f2, f[refine_labels])

    # check if the bundle is recognized correctly
    for row in D:
        assert_equal(row.min(), 0)
Example #7
def test_rb_no_neighb():
    # what if no neighbors are found? No recognition

    b = Streamlines(fornix)
    b1 = b.copy()

    b2 = b1[:20].copy()
    b2._data += np.array([100, 0, 0])

    b3 = b1[:20].copy()
    b3._data += np.array([300, 0, 0])

    b.extend(b3)

    rb = RecoBundles(b, greater_than=0, clust_thr=10)

    rec_trans, rec_labels = rb.recognize(model_bundle=b2,
                                         model_clust_thr=5.,
                                         reduction_thr=10)

    if len(rec_trans) > 0:
        refine_trans, refine_labels = rb.refine(model_bundle=b2,
                                                pruned_streamlines=rec_trans,
                                                model_clust_thr=5.,
                                                reduction_thr=10)

        assert_equal(len(refine_labels), 0)
        assert_equal(len(refine_trans), 0)

    else:
        assert_equal(len(rec_labels), 0)
        assert_equal(len(rec_trans), 0)
Example #8
def test_rb_reduction_mam():

    rb = RecoBundles(f, greater_than=0, clust_thr=10, verbose=True)

    rec_trans, rec_labels = rb.recognize(model_bundle=f2,
                                         model_clust_thr=5.,
                                         reduction_thr=10,
                                         reduction_distance='mam',
                                         slr=True,
                                         slr_metric='asymmetric',
                                         pruning_distance='mam')

    D = bundles_distances_mam(f2, f[rec_labels])

    # check if the bundle is recognized correctly
    if len(f2) == len(rec_labels):
        for row in D:
            assert_equal(row.min(), 0)

    refine_trans, refine_labels = rb.refine(model_bundle=f2,
                                            pruned_streamlines=rec_trans,
                                            model_clust_thr=5.,
                                            reduction_thr=10)

    D = bundles_distances_mam(f2, f[refine_labels])

    # check if the bundle is recognized correctly
    for row in D:
        assert_equal(row.min(), 0)
Example #9
def test_rb_disable_slr():

    rb = RecoBundles(f, greater_than=0, clust_thr=10)

    rec_trans, rec_labels = rb.recognize(model_bundle=f2,
                                         model_clust_thr=5.,
                                         reduction_thr=10,
                                         slr=False)

    D = bundles_distances_mam(f2, f[rec_labels])

    # check if the bundle is recognized correctly
    if len(f2) == len(rec_labels):
        for row in D:
            assert_equal(row.min(), 0)

    refine_trans, refine_labels = rb.refine(model_bundle=f2,
                                            pruned_streamlines=rec_trans,
                                            model_clust_thr=5.,
                                            reduction_thr=10)

    D = bundles_distances_mam(f2, f[refine_labels])

    # check if the bundle is recognized correctly
    for row in D:
        assert_equal(row.min(), 0)
Example #10
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.in_tractogram, args.in_transfo])
    assert_outputs_exist(parser, args, args.out_tractogram)

    if args.verbose:
        log_level = logging.INFO
        logging.basicConfig(level=log_level)

    wb_file = load_tractogram_with_reference(parser, args, args.in_tractogram)
    wb_streamlines = wb_file.streamlines
    model_file = load_tractogram_with_reference(parser, args, args.in_model)

    transfo = load_matrix_in_any_format(args.in_transfo)
    if args.inverse:
        transfo = np.linalg.inv(transfo)

    before, after = compute_distance_barycenters(wb_file, model_file, transfo)
    if after > before:
        logging.warning('The distance between volumes barycenter should be '
                        'lower after registration. Maybe try using/removing '
                        '--inverse.')
        logging.info('Distance before: {}, Distance after: {}'.format(
            np.round(before, 3), np.round(after, 3)))
    model_streamlines = transform_streamlines(model_file.streamlines, transfo)

    rng = np.random.RandomState(args.seed)
    if args.in_pickle:
        with open(args.in_pickle, 'rb') as infile:
            cluster_map = pickle.load(infile)
        reco_obj = RecoBundles(wb_streamlines,
                               cluster_map=cluster_map,
                               rng=rng,
                               verbose=args.verbose)
    else:
        reco_obj = RecoBundles(wb_streamlines,
                               clust_thr=args.tractogram_clustering_thr,
                               rng=rng,
                               verbose=args.verbose)

    if args.out_pickle:
        with open(args.out_pickle, 'wb') as outfile:
            pickle.dump(reco_obj.cluster_map, outfile)
    _, indices = reco_obj.recognize(ArraySequence(model_streamlines),
                                    args.model_clustering_thr,
                                    pruning_thr=args.pruning_thr,
                                    slr_num_threads=args.slr_threads)
    new_streamlines = wb_streamlines[indices]
    new_data_per_streamlines = wb_file.data_per_streamline[indices]
    new_data_per_points = wb_file.data_per_point[indices]

    if not args.no_empty or new_streamlines:
        sft = StatefulTractogram(new_streamlines,
                                 wb_file.space_attributes,
                                 Space.RASMM,
                                 data_per_streamline=new_data_per_streamlines,
                                 data_per_point=new_data_per_points)
        save_tractogram(sft, args.out_tractogram)
Example #11
def test_rb_clustermap():

    cluster_map = qbx_and_merge(f, thresholds=[40, 25, 20, 10])

    rb = RecoBundles(f, greater_than=0, less_than=1000000,
                     cluster_map=cluster_map, clust_thr=10)
    rec_trans, rec_labels = rb.recognize(model_bundle=f2,
                                         model_clust_thr=5.,
                                         reduction_thr=10)

    D = bundles_distances_mam(f2, f[rec_labels])

    # check if the bundle is recognized correctly
    if len(f2) == len(rec_labels):
        for row in D:
            assert_equal(row.min(), 0)

    refine_trans, refine_labels = rb.refine(model_bundle=f2,
                                            pruned_streamlines=rec_trans,
                                            model_clust_thr=5.,
                                            reduction_thr=10)

    D = bundles_distances_mam(f2, f[refine_labels])

    # check if the bundle is recognized correctly
    for row in D:
        assert_equal(row.min(), 0)
Example #12
    def segment_reco(self, tg=None):
        """
        Segment streamlines using the RecoBundles algorithm [Garyfallidis2017]
        Parameters
        ----------
        tg : StatefulTractogram class instance
            A whole-brain tractogram to be segmented.
        Returns
        -------
        fiber_groups : dict
            Keys are names of the bundles, values are Streamline objects.
            The streamlines in each object have all been oriented to have the
            same orientation (using `dts.orient_by_streamline`).
        """
        tg = self._read_tg(tg=tg)
        fiber_groups = {}

        self.move_streamlines(tg, self.reg_algo)
        # We generate our instance of RB with the moved streamlines:
        self.logger.info("Extracting Bundles")
        rb = RecoBundles(self.moved_sl, verbose=False, rng=self.rng)
        # Next we'll iterate over bundles, registering each one:
        bundle_list = list(self.bundle_dict.keys())
        bundle_list.remove('whole_brain')

        self.logger.info("Assigning Streamlines to Bundles")
        for bundle in bundle_list:
            model_sl = self.bundle_dict[bundle]['sl']
            _, rec_labels = rb.recognize(model_bundle=model_sl,
                                         model_clust_thr=self.model_clust_thr,
                                         reduction_thr=self.reduction_thr,
                                         reduction_distance='mdf',
                                         slr=True,
                                         slr_metric='asymmetric',
                                         pruning_distance='mdf')

            # Use the streamlines in the original space:
            recognized_sl = tg.streamlines[rec_labels]
            if self.refine and len(recognized_sl) > 0:
                _, rec_labels = rb.refine(model_sl,
                                          recognized_sl,
                                          self.model_clust_thr,
                                          reduction_thr=self.reduction_thr,
                                          pruning_thr=self.pruning_thr)
                recognized_sl = tg.streamlines[rec_labels]

            standard_sl = self.bundle_dict[bundle]['centroid']
            oriented_sl = dts.orient_by_streamline(recognized_sl, standard_sl)
            if self.return_idx:
                fiber_groups[bundle] = {}
                fiber_groups[bundle]['idx'] = rec_labels
                fiber_groups[bundle]['sl'] = StatefulTractogram(
                    oriented_sl, self.img, Space.RASMM)
            else:
                fiber_groups[bundle] = StatefulTractogram(
                    oriented_sl, self.img, Space.RASMM)
        self.fiber_groups = fiber_groups
        return fiber_groups
Example #13
def run_rb(template, bucket, pruning_thr=10):
    # try a pruning threshold of 10; if it is not specific enough, drop to 5

    rb = RecoBundles(bucket, clust_thr=5)
    # TODO: for efficiency, we want to segment all model bundles at once
    recog_bundle, recog_labels = rb.recognize(model_bundle=template,
                                              model_clust_thr=5.,
                                              reduction_thr=10,
                                              pruning_thr=pruning_thr,
                                              reduction_distance='mam')
    return recog_bundle, recog_labels
Example #14
def test_rb_check_defaults():

    rb = RecoBundles(f, clust_thr=10)
    rec_trans, rec_labels, recognized = rb.recognize(model_bundle=f2,
                                                     model_clust_thr=5.,
                                                     reduction_thr=10)
    D = bundles_distances_mam(f2, recognized)

    # check if the bundle is recognized correctly
    for row in D:
        assert_equal(row.min(), 0)
Example #15
def recobundles(streamlines, bundle_dict):
    """
    Segment streamlines using the RecoBundles algorithm [Garyfallidis2017]

    Parameters
    ----------
    streamlines : list or Streamlines object.
        A whole-brain tractogram to be segmented.
    bundle_dict: dictionary
        Of the form:

            {'whole_brain': Streamlines,
            'CST_L': {'sl': Streamlines, 'centroid': array},
            'CST_R': {'sl': Streamlines, 'centroid': array},
            ...}

    Returns
    -------
    fiber_groups : dict
        Keys are names of the bundles, values are Streamline objects.
        The streamlines in each object have all been oriented to have the
        same orientation (using `dts.orient_by_streamline`).
    """
    fiber_groups = {}
    # We start with whole-brain SLR:
    atlas = bundle_dict['whole_brain']
    moved, transform, qb_centroids1, qb_centroids2 = whole_brain_slr(
        atlas, streamlines, x0='affine', verbose=False, progressive=True)

    # We generate our instance of RB with the moved streamlines:
    rb = RecoBundles(moved, verbose=False)

    # Next we'll iterate over bundles, registering each one:
    bundle_list = list(bundle_dict.keys())
    bundle_list.remove('whole_brain')

    for bundle in bundle_list:
        model_sl = bundle_dict[bundle]['sl']
        _, rec_labels = rb.recognize(model_bundle=model_sl,
                                     model_clust_thr=5.,
                                     reduction_thr=10,
                                     reduction_distance='mam',
                                     slr=True,
                                     slr_metric='asymmetric',
                                     pruning_distance='mam')

        # Use the streamlines in the original space:
        recognized_sl = streamlines[rec_labels]
        standard_sl = bundle_dict[bundle]['centroid']
        oriented_sl = dts.orient_by_streamline(recognized_sl, standard_sl)
        fiber_groups[bundle] = oriented_sl
    return fiber_groups
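For context, a minimal usage sketch of the function above. The file names are placeholders, `load_trk` is assumed to be DIPY's loader, and the 'centroid' entry is crudely approximated by a single model streamline rather than a true QuickBundles centroid:

import numpy as np
from dipy.io.streamline import load_trk

# Placeholder inputs: a whole-brain atlas, one model bundle, and a subject tractogram.
atlas_sft = load_trk("whole_brain_atlas.trk", "same", bbox_valid_check=False)
cst_l_sft = load_trk("CST_L_model.trk", "same", bbox_valid_check=False)
subject_sft = load_trk("subject_tractogram.trk", "same", bbox_valid_check=False)

bundle_dict = {
    'whole_brain': atlas_sft.streamlines,
    'CST_L': {'sl': cst_l_sft.streamlines,
              # Stand-in centroid: a single model streamline (Nx3 array).
              'centroid': np.asarray(cst_l_sft.streamlines[0])},
}

fiber_groups = recobundles(subject_sft.streamlines, bundle_dict)
cst_l_streamlines = fiber_groups['CST_L']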
Example #16
def test_rb_clustermap():

    cluster_map = qbx_and_merge(f, thresholds=[40, 25, 20, 10])

    rb = RecoBundles(f, cluster_map=cluster_map, clust_thr=10)
    rec_trans, rec_labels, recognized = rb.recognize(model_bundle=f2,
                                                     model_clust_thr=5.,
                                                     reduction_thr=10)
    D = bundles_distances_mam(f2, recognized)

    # check if the bundle is recognized correctly
    for row in D:
        assert_equal(row.min(), 0)
Example #17
def run_rb(template, bucket, cluster_map=None, pruning_thr=10):
    # try a pruning threshold of 10; if it is not specific enough, drop to 5
    if cluster_map is None:
        cluster_map = qbx_and_merge(bucket, thresholds=[40, 25, 20, 10])
    else:
        print("Loading provided cluster map")

    rb = RecoBundles(bucket, cluster_map=cluster_map, clust_thr=5)
    bundle_tsp, labels, bundle_bsp = rb.recognize(model_bundle=template,
                                                  model_clust_thr=5.,
                                                  reduction_thr=10,
                                                  pruning_thr=pruning_thr)
    return bundle_bsp, cluster_map
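The point of returning `cluster_map` is that the whole-brain QuickBundlesX clustering only has to be computed once. A hedged sketch of that reuse pattern (the template and whole-brain variables are placeholders, not from the source):

# First call builds the cluster map from the whole-brain streamlines.
af_l_bundle, cluster_map = run_rb(af_l_template, whole_brain_streamlines)

# Later calls for other model bundles reuse the same map and skip re-clustering.
cst_l_bundle, _ = run_rb(cst_l_template, whole_brain_streamlines,
                         cluster_map=cluster_map)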
Example #18
def main():
    parser = _build_args_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.in_tractogram, args.transformation])
    assert_outputs_exist(parser, args, args.output_name)

    wb_file = load_tractogram_with_reference(parser, args, args.in_tractogram)
    wb_streamlines = wb_file.streamlines
    model_file = load_tractogram_with_reference(parser, args, args.in_model)

    # Default transformation source is expected to be ANTs
    transfo = np.loadtxt(args.transformation)
    if args.inverse:
        transfo = np.linalg.inv(transfo)

    model_streamlines = ArraySequence(
        transform_streamlines(model_file.streamlines, transfo))

    rng = np.random.RandomState(args.seed)
    if args.input_pickle:
        with open(args.input_pickle, 'rb') as infile:
            cluster_map = pickle.load(infile)
        reco_obj = RecoBundles(wb_streamlines,
                               cluster_map=cluster_map,
                               rng=rng,
                               verbose=args.verbose)
    else:
        reco_obj = RecoBundles(wb_streamlines,
                               clust_thr=args.wb_clustering_thr,
                               rng=rng,
                               verbose=args.verbose)

    if args.output_pickle:
        with open(args.output_pickle, 'wb') as outfile:
            pickle.dump(reco_obj.cluster_map, outfile)
    _, indices = reco_obj.recognize(model_streamlines,
                                    args.model_clustering_thr,
                                    pruning_thr=args.pruning_thr,
                                    slr_num_threads=args.slr_threads)
    new_streamlines = wb_streamlines[indices]
    new_data_per_streamlines = wb_file.data_per_streamline[indices]
    new_data_per_points = wb_file.data_per_point[indices]

    if not args.no_empty or new_streamlines:
        sft = StatefulTractogram(new_streamlines,
                                 wb_file,
                                 Space.RASMM,
                                 data_per_streamline=new_data_per_streamlines,
                                 data_per_point=new_data_per_points)
        save_tractogram(sft, args.output_name)
Example #19
def bundle_extract(atlas_track_path, atlas_bundle_path, target_track_path):

    time0 = time.time()

    atlas_file = atlas_track_path
    target_file = target_track_path

    print('loading data begin! time:', time.time() - time0)

    sft_atlas = load_trk(atlas_file, "same", bbox_valid_check=False)
    atlas = sft_atlas.streamlines
    atlas_header = create_tractogram_header(atlas_file,
                                            *sft_atlas.space_attributes)

    sft_target = load_trk(target_file, "same", bbox_valid_check=False)
    target = sft_target.streamlines
    target_header = create_tractogram_header(target_file,
                                             *sft_target.space_attributes)

    moved, transform, qb_centroids1, qb_centroids2 = whole_brain_slr(
        atlas,
        target,
        x0='affine',
        verbose=True,
        progressive=True,
        rng=np.random.RandomState(1984))

    bundle_track = StatefulTractogram(moved, target_header, Space.RASMM)
    save_trk(bundle_track, 'moved.trk', bbox_valid_check=False)

    np.save("slr_transform.npy", transform)

    model_bundle_file = atlas_bundle_path
    model_bundle = load_trk(model_bundle_file, "same", bbox_valid_check=False)
    model_bundle = model_bundle.streamlines

    print('comparing begin! time:', time.time() - time0)

    rb = RecoBundles(moved, verbose=True, rng=np.random.RandomState(2001))

    recognized_bundle, bundle_labels = rb.recognize(model_bundle=model_bundle,
                                                    model_clust_thr=0,
                                                    reduction_thr=20,
                                                    reduction_distance='mam',
                                                    slr=True,
                                                    slr_metric='asymmetric',
                                                    pruning_distance='mam')

    bundle_track = StatefulTractogram(target[bundle_labels], target_header,
                                      Space.RASMM)
    return bundle_track
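A short usage sketch for the function above (the paths are placeholders); `save_trk` is already imported in this example's context:

af_l_track = bundle_extract("atlas_tractogram.trk",
                            "atlas_AF_L.trk",
                            "subject_tractogram.trk")
save_trk(af_l_track, "subject_AF_L.trk", bbox_valid_check=False)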
Example #20
def test_rb_no_verbose_and_mam():

    rb = RecoBundles(f, clust_thr=10, verbose=False)

    rec_trans, rec_labels, recognized = rb.recognize(model_bundle=f2,
                                                     model_clust_thr=5.,
                                                     reduction_thr=10,
                                                     slr=True,
                                                     pruning_distance='mam')

    D = bundles_distances_mam(f2, recognized)

    # check if the bundle is recognized correctly
    for row in D:
        assert_equal(row.min(), 0)
Example #21
def run_rb(templatesls, bucketosls, cluster_map=None, pruning_thr=10):
    # try a pruning threshold of 10; if it is not specific enough, drop to 5
    if cluster_map is None:
        cluster_map = qbx_and_merge(bucketosls, thresholds=[40, 25, 20, 10])
    else:
        print("Loading provided cluster map")

    rb = RecoBundles(bucketosls, cluster_map=cluster_map, clust_thr=5)
    recognized_atlassp, rec_labels, recognized_ptsp = rb.recognize(
        model_bundle=templatesls,
        model_clust_thr=5.,
        reduction_thr=10,
        pruning_thr=pruning_thr)
    '''rb = RecoBundles(bucketosls, cluster_map=cluster_map, clust_thr=10)
    recognized, rec_labels, rec_trans = rb.recognize(model_bundle=templatesls,
                                                         model_clust_thr=1.)'''
    #D = bundles_distances_mam(templatesls, recognized)

    return recognized_ptsp, cluster_map
Example #22
def find_bundle(dipy_home, moved, bundle_num, rt=50, mct=0.1):
    bundle_folder = pjoin(dipy_home, 'bundle_atlas_hcp842',
                          'Atlas_80_Bundles', 'bundles')
    bundles = os.listdir(bundle_folder)
    model_file = pjoin(dipy_home, 'bundle_atlas_hcp842', 'Atlas_80_Bundles',
                       'bundles', bundles[bundle_num])

    sft_model = load_trk(model_file, "same", bbox_valid_check=False)
    model = sft_model.streamlines
    moved = set_number_of_points(moved, 20)
    rb = RecoBundles(moved,
                     verbose=True,
                     rng=np.random.RandomState(2001),
                     nb_pts=20)
    #model = set_number_of_points(model,20)
    recognized_bundle, bundle_labels = rb.recognize(model_bundle=model,
                                                    model_clust_thr=mct,
                                                    reduction_thr=rt,
                                                    reduction_distance='mam',
                                                    slr=True,
                                                    slr_metric='asymmetric',
                                                    pruning_distance='mam')
    return recognized_bundle, bundle_labels, model
Example #23
    def run(self,
            streamline_files,
            model_bundle_files,
            greater_than=50,
            less_than=1000000,
            no_slr=False,
            clust_thr=15.,
            reduction_thr=15.,
            reduction_distance='mdf',
            model_clust_thr=2.5,
            pruning_thr=8.,
            pruning_distance='mdf',
            slr_metric='symmetric',
            slr_transform='similarity',
            slr_matrix='small',
            refine=False,
            r_reduction_thr=12.,
            r_pruning_thr=6.,
            no_r_slr=False,
            out_dir='',
            out_recognized_transf='recognized.trk',
            out_recognized_labels='labels.npy'):
        """ Recognize bundles

        Parameters
        ----------
        streamline_files : string
            The path of streamline files where you want to recognize bundles.
        model_bundle_files : string
            The path of model bundle files.
        greater_than : int, optional
            Keep streamlines that have length greater than
            this value in mm.
        less_than : int, optional
            Keep streamlines that have length less than this value
            in mm.
        no_slr : bool, optional
            Don't enable local Streamline-based Linear
            Registration.
        clust_thr : float, optional
            MDF distance threshold for all streamlines.
        reduction_thr : float, optional
            Reduce search space by (mm).
        reduction_distance : string, optional
            Reduction distance type can be mdf or mam.
        model_clust_thr : float, optional
            MDF distance threshold for the model bundles.
        pruning_thr : float, optional
            Pruning after matching.
        pruning_distance : string, optional
            Pruning distance type can be mdf or mam.
        slr_metric : string, optional
            Options are None, symmetric, asymmetric or diagonal.
        slr_transform : string, optional
            Transformation allowed. translation, rigid, similarity or scaling.
        slr_matrix : string, optional
            Options are 'nano', 'tiny', 'small', 'medium', 'large', 'huge'.
        refine : bool, optional
            Enable refinement of the recognized bundle.
        r_reduction_thr : float, optional
            Reduce search space by (mm) during refinement.
        r_pruning_thr : float, optional
            Pruning after matching during refinement.
        no_r_slr : bool, optional
            Don't enable local Streamline-based Linear
            Registration during refinement.
        out_dir : string, optional
            Output directory. (default current directory)
        out_recognized_transf : string, optional
            Recognized bundle in the space of the model bundle.
        out_recognized_labels : string, optional
            Indices of recognized bundle in the original tractogram.

        References
        ----------
        .. [Garyfallidis17] Garyfallidis et al. Recognition of white matter
         bundles using local and global streamline-based registration and
         clustering, Neuroimage, 2017.

        .. [Chandio2020] Chandio, B.Q., Risacher, S.L., Pestilli, F.,
        Bullock, D., Yeh, FC., Koudoro, S., Rokem, A., Harezlak, J., and
        Garyfallidis, E. Bundle analytics, a computational framework for
        investigating the shapes and profiles of brain pathways across
        populations. Sci Rep 10, 17149 (2020)

        """
        slr = not no_slr
        r_slr = not no_r_slr

        bounds = [(-30, 30), (-30, 30), (-30, 30), (-45, 45), (-45, 45),
                  (-45, 45), (0.8, 1.2), (0.8, 1.2), (0.8, 1.2)]

        slr_matrix = slr_matrix.lower()
        if slr_matrix == 'nano':
            slr_select = (100, 100)
        if slr_matrix == 'tiny':
            slr_select = (250, 250)
        if slr_matrix == 'small':
            slr_select = (400, 400)
        if slr_matrix == 'medium':
            slr_select = (600, 600)
        if slr_matrix == 'large':
            slr_select = (800, 800)
        if slr_matrix == 'huge':
            slr_select = (1200, 1200)

        slr_transform = slr_transform.lower()
        if slr_transform == 'translation':
            bounds = bounds[:3]
        if slr_transform == 'rigid':
            bounds = bounds[:6]
        if slr_transform == 'similarity':
            bounds = bounds[:7]
        if slr_transform == 'scaling':
            bounds = bounds[:9]

        logging.info('### RecoBundles ###')

        io_it = self.get_io_iterator()

        t = time()
        logging.info(streamline_files)
        input_obj = load_tractogram(streamline_files,
                                    'same',
                                    bbox_valid_check=False)
        streamlines = input_obj.streamlines

        logging.info(' Loading time %0.3f sec' % (time() - t, ))

        rb = RecoBundles(streamlines,
                         greater_than=greater_than,
                         less_than=less_than)

        for _, mb, out_rec, out_labels in io_it:
            t = time()
            logging.info(mb)
            model_bundle = load_tractogram(mb, 'same',
                                           bbox_valid_check=False).streamlines
            logging.info(' Loading time %0.3f sec' % (time() - t, ))
            logging.info("model file = ")
            logging.info(mb)

            recognized_bundle, labels = \
                rb.recognize(
                    model_bundle,
                    model_clust_thr=model_clust_thr,
                    reduction_thr=reduction_thr,
                    reduction_distance=reduction_distance,
                    pruning_thr=pruning_thr,
                    pruning_distance=pruning_distance,
                    slr=slr,
                    slr_metric=slr_metric,
                    slr_x0=slr_transform,
                    slr_bounds=bounds,
                    slr_select=slr_select,
                    slr_method='L-BFGS-B')

            if refine:

                if len(recognized_bundle) > 1:

                    # affine
                    x0 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1, 0, 0, 0])
                    affine_bounds = [(-30, 30), (-30, 30), (-30, 30),
                                     (-45, 45), (-45, 45), (-45, 45),
                                     (0.8, 1.2), (0.8, 1.2), (0.8, 1.2),
                                     (-10, 10), (-10, 10), (-10, 10)]

                    recognized_bundle, labels = \
                        rb.refine(
                            model_bundle,
                            recognized_bundle,
                            model_clust_thr=model_clust_thr,
                            reduction_thr=r_reduction_thr,
                            reduction_distance=reduction_distance,
                            pruning_thr=r_pruning_thr,
                            pruning_distance=pruning_distance,
                            slr=r_slr,
                            slr_metric=slr_metric,
                            slr_x0=x0,
                            slr_bounds=affine_bounds,
                            slr_select=slr_select,
                            slr_method='L-BFGS-B')

            if len(labels) > 0:
                ba, bmd = rb.evaluate_results(model_bundle, recognized_bundle,
                                              slr_select)

                logging.info("Bundle adjacency Metric {0}".format(ba))
                logging.info("Bundle Min Distance Metric {0}".format(bmd))

            new_tractogram = StatefulTractogram(recognized_bundle,
                                                streamline_files, Space.RASMM)
            save_tractogram(new_tractogram, out_rec, bbox_valid_check=False)
            logging.info('Saving output files ...')
            np.save(out_labels, np.array(labels))
            logging.info(out_rec)
            logging.info(out_labels)
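This `run` method belongs to a DIPY workflow class; below is a hedged sketch of driving it programmatically, assuming the class is DIPY's `RecoBundlesFlow` (the paths are placeholders, not from the source):

from dipy.workflows.segment import RecoBundlesFlow  # assumed workflow class

flow = RecoBundlesFlow()
# Segment one model bundle from a subject tractogram with refinement enabled.
flow.run("subject_tractogram.trk",
         "model_bundles/AF_L.trk",
         model_clust_thr=2.5,
         reduction_thr=15.,
         pruning_thr=8.,
         refine=True,
         out_dir="rb_out")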
Example #24
"""

model_af_l_file, model_cst_l_file = get_two_hcp842_bundles()
"""
Extracting bundles using recobundles [Garyfallidis17]_
"""

sft_af_l = load_trk(model_af_l_file, "same", bbox_valid_check=False)
model_af_l = sft_af_l.streamlines

rb = RecoBundles(moved, verbose=True)

recognized_af_l, af_l_labels = rb.recognize(model_bundle=model_af_l,
                                            model_clust_thr=5.,
                                            reduction_thr=10,
                                            reduction_distance='mam',
                                            slr=True,
                                            slr_metric='asymmetric',
                                            pruning_distance='mam')
"""
Let's visualize the extracted Arcuate Fasciculus Left bundle and the model
bundle together.
"""

interactive = False

ren = window.Renderer()
ren.SetBackground(1, 1, 1)
ren.add(actor.line(model_af_l, colors=(.1, .7, .26)))
ren.add(actor.line(recognized_af_l, colors=(.1, .1, 6)))
ren.set_camera(focal_point=(320.21296692, 21.28884506, 17.2174015),
Example #25
if interactive:
    window.show(scene)
"""
.. figure:: AF_L_model_bundle.png
   :align: center

   Model Arcuate Fasciculus Left bundle

"""

rb = RecoBundles(moved, verbose=True, rng=np.random.RandomState(2001))

recognized_af_l, af_l_labels = rb.recognize(model_bundle=model_af_l,
                                            model_clust_thr=0.1,
                                            reduction_thr=15,
                                            pruning_thr=7,
                                            reduction_distance='mdf',
                                            pruning_distance='mdf',
                                            slr=True)
"""
Let's visualize the extracted Arcuate Fasciculus Left bundle.
"""

interactive = False

scene = window.Scene()
scene.SetBackground(1, 1, 1)
scene.add(actor.line(recognized_af_l))
scene.set_camera(focal_point=(-18.17281532, -19.55606842, 6.92485857),
                 position=(-360.11, -30.46, -40.44),
                 view_up=(-0.03, 0.028, 0.89))
Example #26
    def run(self, streamline_files, model_bundle_files,
            greater_than=50, less_than=1000000,
            no_slr=False, clust_thr=15.,
            reduction_thr=15.,
            reduction_distance='mdf',
            model_clust_thr=2.5,
            pruning_thr=8.,
            pruning_distance='mdf',
            slr_metric='symmetric',
            slr_transform='similarity',
            slr_matrix='small',
            refine=False, r_reduction_thr=12.,
            r_pruning_thr=6., no_r_slr=False,
            out_dir='',
            out_recognized_transf='recognized.trk',
            out_recognized_labels='labels.npy'):
        """ Recognize bundles

        Parameters
        ----------
        streamline_files : string
            The path of streamline files where you want to recognize bundles
        model_bundle_files : string
            The path of model bundle files
        greater_than : int, optional
            Keep streamlines that have length greater than
            this value (default 50) in mm.
        less_than : int, optional
            Keep streamlines that have length less than this value
            (default 1000000) in mm.
        no_slr : bool, optional
            Don't enable local Streamline-based Linear
            Registration (default False).
        clust_thr : float, optional
            MDF distance threshold for all streamlines (default 15)
        reduction_thr : float, optional
            Reduce search space by (mm) (default 15)
        reduction_distance : string, optional
            Reduction distance type can be mdf or mam (default mdf)
        model_clust_thr : float, optional
            MDF distance threshold for the model bundles (default 2.5)
        pruning_thr : float, optional
            Pruning after matching (default 8).
        pruning_distance : string, optional
            Pruning distance type can be mdf or mam (default mdf)
        slr_metric : string, optional
            Options are None, symmetric, asymmetric or diagonal
            (default symmetric).
        slr_transform : string, optional
            Transformation allowed. translation, rigid, similarity or scaling
            (Default 'similarity').
        slr_matrix : string, optional
            Options are 'nano', 'tiny', 'small', 'medium', 'large', 'huge'
            (default 'small')
        refine : bool, optional
            Enable refinement of the recognized bundle (default False).
        r_reduction_thr : float, optional
            Reduce search space by (mm) during refinement (default 12).
        r_pruning_thr : float, optional
            Pruning after matching during refinement (default 6).
        no_r_slr : bool, optional
            Don't enable local Streamline-based Linear
            Registration during refinement (default False).
        out_dir : string, optional
            Output directory (default input file directory)
        out_recognized_transf : string, optional
            Recognized bundle in the space of the model bundle
            (default 'recognized.trk')
        out_recognized_labels : string, optional
            Indices of recognized bundle in the original tractogram
            (default 'labels.npy')

        References
        ----------
        .. [Garyfallidis17] Garyfallidis et al. Recognition of white matter
         bundles using local and global streamline-based registration and
         clustering, Neuroimage, 2017.
        """
        slr = not no_slr
        r_slr = not no_r_slr

        bounds = [(-30, 30), (-30, 30), (-30, 30),
                  (-45, 45), (-45, 45), (-45, 45),
                  (0.8, 1.2), (0.8, 1.2), (0.8, 1.2)]

        slr_matrix = slr_matrix.lower()
        if slr_matrix == 'nano':
            slr_select = (100, 100)
        if slr_matrix == 'tiny':
            slr_select = (250, 250)
        if slr_matrix == 'small':
            slr_select = (400, 400)
        if slr_matrix == 'medium':
            slr_select = (600, 600)
        if slr_matrix == 'large':
            slr_select = (800, 800)
        if slr_matrix == 'huge':
            slr_select = (1200, 1200)

        slr_transform = slr_transform.lower()
        if slr_transform == 'translation':
            bounds = bounds[:3]
        if slr_transform == 'rigid':
            bounds = bounds[:6]
        if slr_transform == 'similarity':
            bounds = bounds[:7]
        if slr_transform == 'scaling':
            bounds = bounds[:9]

        logging.info('### RecoBundles ###')

        io_it = self.get_io_iterator()

        t = time()
        logging.info(streamline_files)
        streamlines, header = load_trk(streamline_files)

        logging.info(' Loading time %0.3f sec' % (time() - t,))

        rb = RecoBundles(streamlines, greater_than=greater_than,
                         less_than=less_than)

        for _, mb, out_rec, out_labels in io_it:
            t = time()
            logging.info(mb)
            model_bundle, _ = load_trk(mb)
            logging.info(' Loading time %0.3f sec' % (time() - t,))
            logging.info("model file = ")
            logging.info(mb)

            recognized_bundle, labels = \
                rb.recognize(
                    model_bundle,
                    model_clust_thr=model_clust_thr,
                    reduction_thr=reduction_thr,
                    reduction_distance=reduction_distance,
                    pruning_thr=pruning_thr,
                    pruning_distance=pruning_distance,
                    slr=slr,
                    slr_metric=slr_metric,
                    slr_x0=slr_transform,
                    slr_bounds=bounds,
                    slr_select=slr_select,
                    slr_method='L-BFGS-B')

            if refine:

                if len(recognized_bundle) > 1:

                    # affine
                    x0 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1, 0, 0, 0])
                    affine_bounds = [(-30, 30), (-30, 30), (-30, 30),
                                     (-45, 45), (-45, 45), (-45, 45),
                                     (0.8, 1.2), (0.8, 1.2), (0.8, 1.2),
                                     (-10, 10), (-10, 10), (-10, 10)]

                    recognized_bundle, labels = \
                        rb.refine(
                            model_bundle,
                            recognized_bundle,
                            model_clust_thr=model_clust_thr,
                            reduction_thr=r_reduction_thr,
                            reduction_distance=reduction_distance,
                            pruning_thr=r_pruning_thr,
                            pruning_distance=pruning_distance,
                            slr=r_slr,
                            slr_metric=slr_metric,
                            slr_x0=x0,
                            slr_bounds=affine_bounds,
                            slr_select=slr_select,
                            slr_method='L-BFGS-B')

            if len(labels) > 0:
                ba, bmd = rb.evaluate_results(
                             model_bundle, recognized_bundle,
                             slr_select)

                logging.info("Bundle adjacency Metric {0}".format(ba))
                logging.info("Bundle Min Distance Metric {0}".format(bmd))

            save_trk(out_rec, recognized_bundle, np.eye(4))

            logging.info('Saving output files ...')
            np.save(out_labels, np.array(labels))
            logging.info(out_rec)
            logging.info(out_labels)
Example #27
    def run(self,
            streamline_files,
            model_bundle_files,
            greater_than=50,
            less_than=1000000,
            no_slr=False,
            clust_thr=15.,
            reduction_thr=15.,
            reduction_distance='mdf',
            model_clust_thr=2.5,
            pruning_thr=8.,
            pruning_distance='mdf',
            slr_metric='symmetric',
            slr_transform='similarity',
            slr_matrix='small',
            refine=False,
            r_reduction_thr=12.,
            r_pruning_thr=6.,
            no_r_slr=False,
            out_dir='',
            out_recognized_transf='recognized.trk',
            out_recognized_labels='labels.npy'):
        """ Recognize bundles

        Parameters
        ----------
        streamline_files : string
            The path of streamline files where you want to recognize bundles
        model_bundle_files : string
            The path of model bundle files
        greater_than : int, optional
            Keep streamlines that have length greater than
            this value (default 50) in mm.
        less_than : int, optional
            Keep streamlines that have length less than this value
            (default 1000000) in mm.
        no_slr : bool, optional
            Don't enable local Streamline-based Linear
            Registration (default False).
        clust_thr : float, optional
            MDF distance threshold for all streamlines (default 15)
        reduction_thr : float, optional
            Reduce search space by (mm) (default 15)
        reduction_distance : string, optional
            Reduction distance type can be mdf or mam (default mdf)
        model_clust_thr : float, optional
            MDF distance threshold for the model bundles (default 2.5)
        pruning_thr : float, optional
            Pruning after matching (default 8).
        pruning_distance : string, optional
            Pruning distance type can be mdf or mam (default mdf)
        slr_metric : string, optional
            Options are None, symmetric, asymmetric or diagonal
            (default symmetric).
        slr_transform : string, optional
            Transformation allowed. translation, rigid, similarity or scaling
            (Default 'similarity').
        slr_matrix : string, optional
            Options are 'nano', 'tiny', 'small', 'medium', 'large', 'huge'
            (default 'small')
        refine : bool, optional
            Enable refinement of the recognized bundle (default False).
        r_reduction_thr : float, optional
            Reduce search space by (mm) during refinement (default 12).
        r_pruning_thr : float, optional
            Pruning after matching during refinement (default 6).
        no_r_slr : bool, optional
            Don't enable local Streamline-based Linear
            Registration during refinement (default False).
        out_dir : string, optional
            Output directory (default input file directory)
        out_recognized_transf : string, optional
            Recognized bundle in the space of the model bundle
            (default 'recognized.trk')
        out_recognized_labels : string, optional
            Indices of recognized bundle in the original tractogram
            (default 'labels.npy')

        References
        ----------
        .. [Garyfallidis17] Garyfallidis et al. Recognition of white matter
         bundles using local and global streamline-based registration and
         clustering, Neuroimage, 2017.
        """
        slr = not no_slr
        r_slr = not no_r_slr

        bounds = [(-30, 30), (-30, 30), (-30, 30), (-45, 45), (-45, 45),
                  (-45, 45), (0.8, 1.2), (0.8, 1.2), (0.8, 1.2)]

        slr_matrix = slr_matrix.lower()
        if slr_matrix == 'nano':
            slr_select = (100, 100)
        if slr_matrix == 'tiny':
            slr_select = (250, 250)
        if slr_matrix == 'small':
            slr_select = (400, 400)
        if slr_matrix == 'medium':
            slr_select = (600, 600)
        if slr_matrix == 'large':
            slr_select = (800, 800)
        if slr_matrix == 'huge':
            slr_select = (1200, 1200)

        slr_transform = slr_transform.lower()
        if slr_transform == 'translation':
            bounds = bounds[:3]
        if slr_transform == 'rigid':
            bounds = bounds[:6]
        if slr_transform == 'similarity':
            bounds = bounds[:7]
        if slr_transform == 'scaling':
            bounds = bounds[:9]

        logging.info('### RecoBundles ###')

        io_it = self.get_io_iterator()

        t = time()
        logging.info(streamline_files)
        input_obj = nib.streamlines.load(streamline_files)
        streamlines = input_obj.streamlines

        logging.info(' Loading time %0.3f sec' % (time() - t, ))

        rb = RecoBundles(streamlines,
                         greater_than=greater_than,
                         less_than=less_than)

        for _, mb, out_rec, out_labels in io_it:
            t = time()
            logging.info(mb)
            model_bundle = nib.streamlines.load(mb).streamlines
            logging.info(' Loading time %0.3f sec' % (time() - t, ))
            logging.info("model file = ")
            logging.info(mb)

            recognized_bundle, labels = \
                rb.recognize(
                    model_bundle,
                    model_clust_thr=model_clust_thr,
                    reduction_thr=reduction_thr,
                    reduction_distance=reduction_distance,
                    pruning_thr=pruning_thr,
                    pruning_distance=pruning_distance,
                    slr=slr,
                    slr_metric=slr_metric,
                    slr_x0=slr_transform,
                    slr_bounds=bounds,
                    slr_select=slr_select,
                    slr_method='L-BFGS-B')

            if refine:

                if len(recognized_bundle) > 1:

                    # affine
                    x0 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1, 0, 0, 0])
                    affine_bounds = [(-30, 30), (-30, 30), (-30, 30),
                                     (-45, 45), (-45, 45), (-45, 45),
                                     (0.8, 1.2), (0.8, 1.2), (0.8, 1.2),
                                     (-10, 10), (-10, 10), (-10, 10)]

                    recognized_bundle, labels = \
                        rb.refine(
                            model_bundle,
                            recognized_bundle,
                            model_clust_thr=model_clust_thr,
                            reduction_thr=r_reduction_thr,
                            reduction_distance=reduction_distance,
                            pruning_thr=r_pruning_thr,
                            pruning_distance=pruning_distance,
                            slr=r_slr,
                            slr_metric=slr_metric,
                            slr_x0=x0,
                            slr_bounds=affine_bounds,
                            slr_select=slr_select,
                            slr_method='L-BFGS-B')

            if len(labels) > 0:
                ba, bmd = rb.evaluate_results(model_bundle, recognized_bundle,
                                              slr_select)

                logging.info("Bundle adjacency Metric {0}".format(ba))
                logging.info("Bundle Min Distance Metric {0}".format(bmd))

            new_tractogram = nib.streamlines.Tractogram(
                recognized_bundle, affine_to_rasmm=np.eye(4))
            nib.streamlines.save(new_tractogram,
                                 out_rec,
                                 header=input_obj.header)
            logging.info('Saving output files ...')
            np.save(out_labels, np.array(labels))
            logging.info(out_rec)
            logging.info(out_labels)