Example #1
def load_tractogram_with_reference(parser,
                                   args,
                                   filepath,
                                   bbox_check=True,
                                   arg_name=None):

    _, ext = os.path.splitext(filepath)
    if ext == '.trk':
        sft = load_tractogram(filepath, 'same', bbox_valid_check=bbox_check)
    elif ext in ['.tck', '.fib', '.vtk', '.dpy']:
        if arg_name:
            arg_ref = arg_name + '_ref'
            if getattr(args, arg_ref):
                sft = load_tractogram(filepath,
                                      getattr(args, arg_ref),
                                      bbox_valid_check=bbox_check)
            else:
                parser.error('--{} is required for this file format '
                             '{}.'.format(arg_ref, filepath))
        elif args.reference is None:
            parser.error('--reference is required for this file format '
                         '{}.'.format(filepath))
        else:
            sft = load_tractogram(filepath,
                                  args.reference,
                                  bbox_valid_check=bbox_check)

    else:
        parser.error('{} is an unsupported file format'.format(filepath))

    return sft
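A minimal sketch of how this helper might be wired into an argparse-based script, assuming the function above is in scope; the parser, the positional in_tractogram argument, and the --reference option shown here are illustrative, not taken from the example.

import argparse


def _build_parser():
    # Hypothetical parser exposing only what load_tractogram_with_reference
    # inspects: the input path and an optional --reference image.
    p = argparse.ArgumentParser(description='Load a tractogram of any format.')
    p.add_argument('in_tractogram',
                   help='Path to a .trk/.tck/.fib/.vtk/.dpy file.')
    p.add_argument('--reference',
                   help='NIfTI reference, required for non-TRK formats.')
    return p


if __name__ == '__main__':
    parser = _build_parser()
    args = parser.parse_args()
    sft = load_tractogram_with_reference(parser, args, args.in_tractogram)
    print('Loaded {} streamlines'.format(len(sft.streamlines)))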
Example #2
def read_hcp_atlas_16_bundles():
    """
    XXX
    """
    bundle_dict = {}
    _, folder = fetch_hcp_atlas_16_bundles()
    whole_brain = load_tractogram(op.join(folder,
                                          'Atlas_in_MNI_Space_16_bundles',
                                          'whole_brain',
                                          'whole_brain_MNI.trk'),
                                  'same',
                                  bbox_valid_check=False).streamlines
    bundle_dict['whole_brain'] = whole_brain
    bundle_files = glob(
        op.join(folder, "Atlas_in_MNI_Space_16_bundles", "bundles", "*.trk"))
    for bundle_file in bundle_files:
        bundle = op.splitext(op.split(bundle_file)[-1])[0]
        bundle_dict[bundle] = {}
        bundle_dict[bundle]['sl'] = load_tractogram(bundle_file,
                                                    'same',
                                                    bbox_valid_check=False)\
            .streamlines

        feature = ResampleFeature(nb_points=100)
        metric = AveragePointwiseEuclideanMetric(feature)
        qb = QuickBundles(np.inf, metric=metric)
        cluster = qb.cluster(bundle_dict[bundle]['sl'])
        bundle_dict[bundle]['centroid'] = cluster.centroids[0]

    # For some reason, this file-name has a 0 in it, instead of an O:
    bundle_dict["IFOF_R"] = bundle_dict["IF0F_R"]
    del bundle_dict["IF0F_R"]
    return bundle_dict
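A short sketch of how the returned dictionary might be consumed (the first call fetches the atlas data if it is not already cached); the structure mirrors what the function above builds: 'whole_brain' maps to raw streamlines, every other key to a dict with 'sl' and 'centroid'.

bundle_dict = read_hcp_atlas_16_bundles()
for name, entry in bundle_dict.items():
    if name == 'whole_brain':
        continue  # stored as raw streamlines, not as a dict
    print(name, len(entry['sl']), entry['centroid'].shape)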
Example #3
def test_recobundles_flow():
    with TemporaryDirectory() as out_dir:
        data_path = get_fnames('fornix')

        fornix = load_tractogram(data_path, 'same',
                                 bbox_valid_check=False).streamlines

        f = Streamlines(fornix)
        f1 = f.copy()

        f2 = f1[:15].copy()
        f2._data += np.array([40, 0, 0])

        f.extend(f2)

        f2_path = pjoin(out_dir, "f2.trk")
        sft = StatefulTractogram(f2, data_path, Space.RASMM)
        save_tractogram(sft, f2_path, bbox_valid_check=False)

        f1_path = pjoin(out_dir, "f1.trk")
        sft = StatefulTractogram(f, data_path, Space.RASMM)
        save_tractogram(sft, f1_path, bbox_valid_check=False)

        rb_flow = RecoBundlesFlow(force=True)
        rb_flow.run(f1_path,
                    f2_path,
                    greater_than=0,
                    clust_thr=10,
                    model_clust_thr=5.,
                    reduction_thr=10,
                    out_dir=out_dir)

        labels = rb_flow.last_generated_outputs['out_recognized_labels']
        recog_trk = rb_flow.last_generated_outputs['out_recognized_transf']

        rec_bundle = load_tractogram(recog_trk, 'same',
                                     bbox_valid_check=False).streamlines
        npt.assert_equal(len(rec_bundle) == len(f2), True)

        label_flow = LabelsBundlesFlow(force=True)
        label_flow.run(f1_path, labels)

        recog_bundle = label_flow.last_generated_outputs['out_bundle']
        rec_bundle_org = load_tractogram(recog_bundle,
                                         'same',
                                         bbox_valid_check=False).streamlines

        BMD = BundleMinDistanceMetric()
        nb_pts = 20
        static = set_number_of_points(f2, nb_pts)
        moving = set_number_of_points(rec_bundle_org, nb_pts)

        BMD.setup(static, moving)
        x0 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1, 0, 0, 0])  # affine
        bmd_value = BMD.distance(x0.tolist())

        npt.assert_equal(bmd_value < 1, True)
Example #4
def to_vox_equivalence():
    sft_1 = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'],
                            to_space=Space.RASMM)
    sft_2 = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'],
                            to_space=Space.RASMM)

    sft_1.to_vox()
    sft_2.to_space(Space.VOX)
    assert_allclose(sft_1.streamlines.get_data(),
                    sft_2.streamlines.get_data(), atol=1e-3, rtol=1e-6)
Example #5
def to_center_equivalence():
    sft_1 = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'],
                            to_space=Space.VOX)
    sft_2 = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'],
                            to_space=Space.VOX)

    sft_1.to_center()
    sft_2.to_origin(Origin.NIFTI)
    assert_allclose(sft_1.streamlines.get_data(),
                    sft_2.streamlines.get_data(), atol=1e-3, rtol=1e-6)
Example #6
def shift_corner_from_voxmm():
    sft_1 = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'],
                            to_space=Space.VOX)
    sft_1.to_corner()
    bbox_1 = sft_1.compute_bounding_box()

    sft_2 = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'],
                            to_space=Space.VOXMM)
    sft_2.to_corner()
    sft_2.to_vox()
    bbox_2 = sft_2.compute_bounding_box()

    assert_allclose(bbox_1, bbox_2, atol=1e-3, rtol=1e-6)
Example #7
def dpy_iterative_saving_loading():
    sft = load_tractogram(filepath_dix['gs.dpy'], filepath_dix['gs.nii'],
                          to_space=Space.RASMM)
    with InTemporaryDirectory():
        save_tractogram(sft, 'gs_iter.dpy')
        tmp_points_rasmm = np.loadtxt(filepath_dix['gs_rasmm_space.txt'])

        for _ in range(100):
            sft_iter = load_tractogram('gs_iter.dpy', filepath_dix['gs.nii'],
                                       to_space=Space.RASMM)
            assert_allclose(tmp_points_rasmm,
                            sft_iter.streamlines.data,
                            atol=1e-3, rtol=1e-6)
            save_tractogram(sft_iter, 'gs_iter.dpy')
Example #8
def to_corner_equivalence():
    sft_1 = load_tractogram(filepath_dix['gs.trk'],
                            filepath_dix['gs.nii'],
                            to_space=Space.VOX)
    sft_2 = load_tractogram(filepath_dix['gs.trk'],
                            filepath_dix['gs.nii'],
                            to_space=Space.VOX)

    sft_1.to_corner()
    sft_2.to_origin(Origin.TRACKVIS)
    assert_allclose(sft_1.streamlines.data,
                    sft_2.streamlines.data,
                    atol=1e-3,
                    rtol=1e-6)
Example #9
def test_bundle_shape_analysis_flow():

    with TemporaryDirectory() as dirpath:
        data_path = get_fnames('fornix')
        fornix = load_tractogram(data_path, 'same',
                                 bbox_valid_check=False).streamlines

        f = Streamlines(fornix)

        mb = os.path.join(dirpath, "model_bundles")
        sub = os.path.join(dirpath, "subjects")

        os.mkdir(mb)
        sft = StatefulTractogram(f, data_path, Space.RASMM)
        save_tractogram(sft,
                        os.path.join(mb, "temp.trk"),
                        bbox_valid_check=False)

        os.mkdir(sub)

        os.mkdir(os.path.join(sub, "patient"))

        os.mkdir(os.path.join(sub, "control"))

        p = os.path.join(sub, "patient", "10001")
        os.mkdir(p)

        c = os.path.join(sub, "control", "20002")
        os.mkdir(c)

        for pre in [p, c]:

            os.mkdir(os.path.join(pre, "rec_bundles"))

            sft = StatefulTractogram(f, data_path, Space.RASMM)
            save_tractogram(sft,
                            os.path.join(pre, "rec_bundles", "temp.trk"),
                            bbox_valid_check=False)
            os.mkdir(os.path.join(pre, "org_bundles"))

            sft = StatefulTractogram(f, data_path, Space.RASMM)
            save_tractogram(sft,
                            os.path.join(pre, "org_bundles", "temp.trk"),
                            bbox_valid_check=False)
            os.mkdir(os.path.join(pre, "anatomical_measures"))

            fa = np.random.rand(255, 255, 255)

            save_nifti(os.path.join(pre, "anatomical_measures", "fa.nii.gz"),
                       fa,
                       affine=np.eye(4))

        out_dir = os.path.join(dirpath, "output")
        os.mkdir(out_dir)

        sm_flow = BundleShapeAnalysis()

        sm_flow.run(sub, out_dir=out_dir)

        assert_true(os.path.exists(os.path.join(out_dir, "temp.npy")))
Example #10
def test_invalid_streamlines_epsilon():

    sft = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'])
    src_strml_count = len(sft)

    epsilon = 1e-6
    obtained_idx_to_remove, obtained_idx_to_keep = \
        sft.remove_invalid_streamlines(epsilon)

    expected_idx_to_keep = list(range(src_strml_count))

    assert len(obtained_idx_to_remove) == 0
    assert expected_idx_to_keep == obtained_idx_to_keep
    assert_(len(sft) == src_strml_count,
            msg='A small epsilon should not remove any streamlines')

    epsilon = 1.0
    obtained_idx_to_remove, obtained_idx_to_keep = \
        sft.remove_invalid_streamlines(epsilon)

    expected_idx_to_remove = [0, 1, 2, 3, 4, 5, 6, 7]
    expected_idx_to_keep = [8, 9, 10, 11, 12]
    expected_len_sft = 5

    expected_removed_strml_count = src_strml_count - expected_len_sft

    assert obtained_idx_to_remove == expected_idx_to_remove
    assert obtained_idx_to_keep == expected_idx_to_keep
    assert_(
        len(sft) == expected_len_sft,
        msg='Too big of an epsilon ({} mm) should have removed {} streamlines '
        '({} corners)'.format(epsilon, expected_removed_strml_count,
                              expected_removed_strml_count))
Example #11
def test_invalid_streamlines():

    sft = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'])
    src_strml_count = len(sft)

    obtained_idx_to_remove, obtained_idx_to_keep = \
        sft.remove_invalid_streamlines()

    expected_idx_to_keep = list(range(src_strml_count))

    assert len(obtained_idx_to_remove) == 0
    assert expected_idx_to_keep == obtained_idx_to_keep
    assert_(
        len(sft) == src_strml_count,
        msg='An unshifted gold standard should have {} invalid streamlines'.
        format(src_strml_count - src_strml_count))

    # Change the dimensions so that a few streamlines become invalid
    sft.dimensions[2] = 5

    obtained_idx_to_remove, obtained_idx_to_keep = \
        sft.remove_invalid_streamlines()

    expected_idx_to_remove = [1, 3, 5, 7, 8, 9, 10, 11]
    expected_idx_to_keep = [0, 2, 4, 6, 12]
    expected_len_sft = 5

    assert obtained_idx_to_remove == expected_idx_to_remove
    assert obtained_idx_to_keep == expected_idx_to_keep
    assert_(len(sft) == expected_len_sft,
            msg='The shifted gold standard should have {} invalid streamlines'.
            format(src_strml_count - expected_len_sft))
Example #12
def test_basic_addition():
    sft = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'])
    sft_first_half = sft[0:7]
    sft_last_half = sft[7:13]

    concatenate_sft = sft_first_half + sft_last_half
    assert_(concatenate_sft == sft, msg='sft were not added correctly')
Example #13
def io_tractogram(extension):
    with InTemporaryDirectory():
        fname = 'test.{}'.format(extension)

        in_affine = np.eye(4)
        in_dimensions = np.array([50, 50, 50])
        in_voxel_sizes = np.array([2, 1.5, 1.5])
        nii_header = create_nifti_header(in_affine, in_dimensions,
                                         in_voxel_sizes)
        sft = StatefulTractogram(streamlines, nii_header, space=Space.RASMM)

        save_tractogram(sft, fname, bbox_valid_check=False)

        if extension == 'trk':
            reference = 'same'
        else:
            reference = nii_header

        sft = load_tractogram(fname, reference, bbox_valid_check=False)
        affine, dimensions, voxel_sizes, _ = sft.space_attributes

        npt.assert_array_equal(in_affine, affine)
        npt.assert_array_equal(in_voxel_sizes, voxel_sizes)
        npt.assert_array_equal(in_dimensions, dimensions)
        npt.assert_equal(len(sft), len(streamlines))
        npt.assert_array_almost_equal(sft.streamlines[1], streamline,
                                      decimal=4)
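The helper above is parametrized by file extension and relies on module-level streamlines/streamline fixtures that are not shown; a sketch of how it might be driven, assuming those fixtures exist.

# The .vtk/.fib cases would additionally need the optional VTK dependency.
for ext in ['trk', 'tck', 'dpy']:
    io_tractogram(ext)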
Example #14
File: test_utils.py  Project: dPys/PyNets
def test_evaluate_streamline_plausibility(dmri_estimation_data,
                                          tractography_estimation_data):
    """
    Test evaluate_streamline_plausibility functionality
    """
    import nibabel as nib
    from pynets.dmri.utils import evaluate_streamline_plausibility
    from dipy.io.stateful_tractogram import Space, Origin
    from dipy.io.streamline import load_tractogram
    from dipy.io import load_pickle

    gtab_file = dmri_estimation_data['gtab_file']
    dwi_path = dmri_estimation_data['dwi_file']
    mask_file = tractography_estimation_data['mask']
    streams = tractography_estimation_data['trk']

    gtab = load_pickle(gtab_file)
    dwi_img = nib.load(dwi_path)
    dwi_data = dwi_img.get_fdata()
    mask_img = nib.load(mask_file)
    mask_data = mask_img.get_fdata()
    tractogram = load_tractogram(
        streams,
        mask_img,
        to_origin=Origin.NIFTI,
        to_space=Space.VOXMM,
        bbox_valid_check=True,
    )
    streamlines = tractogram.streamlines
    cleaned = evaluate_streamline_plausibility(dwi_data, gtab, mask_data,
                                               streamlines)

    assert len(cleaned) > 0
    assert len(cleaned) <= len(streamlines)
Example #15
def convert_trk(tractogram, outtype='tck', output=None, force=False):
    '''Convert a presumed TRK file to either a TCK file or a VTK file.
    '''
    # figure out input type, load file

    if nib.streamlines.detect_format(tractogram) is not nib.streamlines.TrkFile:
        print("Skipping non TRK file: '{}'".format(tractogram))
        return

    if output:
        output_filename = output
    else:
        output_filename = tractogram[:-4] + '.{}'.format(outtype)

    # Respect an existing output regardless of how the name was chosen
    if os.path.isfile(output_filename) and not force:
        print("Skipping existing file: '{}'. Use -f to overwrite.".format(
            output_filename))
        return

    print("Converting '{}' to '{}'\n".format(tractogram, output_filename))
    # load tractogram, set origin to the corner
    trk = load_tractogram(tractogram, reference='same')
    trk.to_corner()  # set origin to the corner
    if outtype == 'tck':
        save_tractogram(trk, output_filename)
    else:
        dipy.io.vtk.save_vtk_streamlines(trk.streamlines, output_filename)
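A usage sketch for the converter above; the file names are hypothetical.

# Convert 'bundle.trk' to 'bundle.tck' next to the input, overwriting if needed.
convert_trk('bundle.trk', outtype='tck', force=True)

# Explicit output path, VTK output (needs the optional VTK dependency).
convert_trk('bundle.trk', outtype='vtk', output='bundle.vtk', force=True)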
Example #16
def bounding_bbox_valid(shift):
    sft = load_tractogram(filepath_dix['gs.trk'],
                          filepath_dix['gs.nii'],
                          shifted_origin=shift,
                          bbox_valid_check=False)

    return sft.is_bbox_in_vox_valid()
Example #17
def prepare_data_for_actors(bundle_filename, reference_filename,
                            target_template_filename):
    sft = load_tractogram(bundle_filename, reference_filename)
    streamlines = sft.streamlines

    # Load and prepare the data
    reference_img = nib.load(reference_filename)
    reference_data = reference_img.get_fdata(dtype=np.float32)
    reference_affine = reference_img.affine

    if target_template_filename:
        target_template_img = nib.load(target_template_filename)
        target_template_data = target_template_img.get_fdata(dtype=np.float32)
        target_template_affine = target_template_img.affine

        # Register the DWI data to the template
        logging.debug('Starting registration...')
        transformed_reference, transformation = register_image(
            target_template_data, target_template_affine, reference_data,
            reference_affine)
        logging.debug('Transforming streamlines...')
        streamlines = transform_streamlines(streamlines,
                                            np.linalg.inv(transformation),
                                            in_place=True)

        new_sft = StatefulTractogram(streamlines, target_template_filename,
                                     Space.RASMM)

        return new_sft, transformed_reference

    return sft, reference_data
Example #18
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()
    assert_inputs_exist(parser, [args.tractogram])
    assert_outputs_exist(parser, args, [], [args.save])

    tracts_format = detect_format(args.tractogram)
    if tracts_format is not TrkFile:
        raise ValueError("Invalid input streamline file format " +
                         "(must be trk): {0}".format(args.tractogram))

    # Load files and data. TRKs can have 'same' as reference
    tractogram = load_tractogram(args.tractogram, 'same')
    # Streamlines are saved in RASMM but seeds are saved in VOX
    # This might produce weird behavior with anisotropic voxel sizes
    tractogram.to_vox()

    streamlines = tractogram.streamlines
    if 'seeds' not in tractogram.data_per_streamline:
        parser.error('Tractogram does not contain seeds')
    seeds = tractogram.data_per_streamline['seeds']

    # Make display objects
    streamlines_actor = actor.line(streamlines)
    points = actor.dots(seeds, color=(1., 1., 1.))

    # Add display objects to canvas
    s = window.Scene()
    s.add(streamlines_actor)
    s.add(points)

    # Show and record if needed
    if args.save is not None:
        window.record(s, out_path=args.save, size=(1000, 1000))
    window.show(s)
Example #19
def bounding_bbox_valid(standard):
    sft = load_tractogram(filepath_dix['gs.trk'],
                          filepath_dix['gs.nii'],
                          to_origin=standard,
                          bbox_valid_check=False)

    return sft.is_bbox_in_vox_valid()
Example #20
def test_slr_flow():
    with TemporaryDirectory() as out_dir:
        data_path = get_fnames('fornix')

        fornix = load_tractogram(data_path, 'same',
                                 bbox_valid_check=False).streamlines

        f = Streamlines(fornix)
        f1 = f.copy()

        f1_path = pjoin(out_dir, "f1.trk")
        sft = StatefulTractogram(f1, data_path, Space.RASMM)
        save_tractogram(sft, f1_path, bbox_valid_check=False)

        f2 = f1.copy()
        f2._data += np.array([50, 0, 0])

        f2_path = pjoin(out_dir, "f2.trk")
        sft = StatefulTractogram(f2, data_path, Space.RASMM)
        save_tractogram(sft, f2_path, bbox_valid_check=False)

        slr_flow = SlrWithQbxFlow(force=True)
        slr_flow.run(f1_path, f2_path)

        out_path = slr_flow.last_generated_outputs['out_moved']

        npt.assert_equal(os.path.isfile(out_path), True)
Example #21
def test_io_streamline():
    with InTemporaryDirectory():
        fname = 'test.trk'
        affine = np.eye(4)

        # Test save
        save_tractogram(fname, streamlines, affine,
                        vox_size=np.array([2, 1.5, 1.5]),
                        shape=np.array([50, 50, 50]))
        tfile = nib.streamlines.load(fname)
        npt.assert_array_equal(affine, tfile.affine)
        npt.assert_array_equal(np.array([2, 1.5, 1.5]),
                               tfile.header.get('voxel_sizes'))
        npt.assert_array_equal(np.array([50, 50, 50]),
                               tfile.header.get('dimensions'))
        npt.assert_equal(len(tfile.streamlines), len(streamlines))
        npt.assert_array_almost_equal(tfile.streamlines[1], streamline,
                                      decimal=4)

        # Test basic save
        save_tractogram(fname, streamlines, affine)
        tfile = nib.streamlines.load(fname)
        npt.assert_array_equal(affine, tfile.affine)
        npt.assert_equal(len(tfile.streamlines), len(streamlines))
        npt.assert_array_almost_equal(tfile.streamlines[1], streamline,
                                      decimal=5)

        # Test Load
        local_streamlines, hdr = load_tractogram(fname)
        npt.assert_equal(len(local_streamlines), len(streamlines))
        for arr1, arr2 in zip(local_streamlines, streamlines):
            npt.assert_allclose(arr1, arr2)
Example #22
def main():
    args = parse_args()

    if os.path.exists(args.output) and not args.force:
        raise ValueError("Output already exists! Use --force to overwrite.")

    sft = load_tractogram(args.input, args.ref,
                          to_space=Space.RASMM,
                          trk_header_check=False,
                          bbox_valid_check=False)

    # There may be invalid streamlines in the input tractogram
    sft.remove_invalid_streamlines()

    # Work in voxel space, and move streamlines to corner so we can use floor()
    # to compare with valid voxel coordinates.
    sft.to_vox()
    sft.to_corner()

    mask = nib.load(args.mask)

    valid_voxels = np.where(mask.get_fdata() > 0.5)

    valid_streamlines = []

    for s in sft.streamlines:
        if _is_coords_valid(s[0], valid_voxels):
            valid_streamlines.append(s)
        elif _is_coords_valid(s[-1], valid_voxels):
            valid_streamlines.append(s[::-1])

    valid_sft = StatefulTractogram(valid_streamlines, args.ref,
                                   space=sft.space,
                                   shifted_origin=sft.shifted_origin)
    save_tractogram(valid_sft, args.output)
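The script above relies on an _is_coords_valid helper that is not shown. One plausible implementation, assuming it receives a corner-origin, voxel-space point and the tuple returned by np.where on the mask (an illustration, not the project's actual helper):

import numpy as np


def _is_coords_valid(point, valid_voxels):
    # In corner-origin voxel space, flooring a coordinate gives the index
    # of the voxel that contains it.
    i, j, k = np.floor(point).astype(int)
    xs, ys, zs = valid_voxels
    return bool(np.any((xs == i) & (ys == j) & (zs == k)))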
Example #23
def compute_voxel_measures(args):
    bundle_filename, bundle_reference = args[0]
    tracking_mask = args[1]
    gs_binary_3d = args[2]

    bundle_sft = load_tractogram(bundle_filename, bundle_reference)
    bundle_sft.to_vox()
    bundle_sft.to_corner()
    bundle_streamlines = bundle_sft.streamlines
    _, bundle_dimensions, _, _ = bundle_sft.space_attributes

    if not bundle_streamlines:
        logging.info('{} is empty'.format(bundle_filename))
        return None

    binary_3d = compute_tract_counts_map(bundle_streamlines, bundle_dimensions)
    binary_3d[binary_3d > 0] = 1

    binary_3d_indices = np.where(binary_3d.flatten() > 0)[0]
    gs_binary_3d_indices = np.where(gs_binary_3d.flatten() > 0)[0]

    voxels_binary = binary_classification(
        binary_3d_indices,
        gs_binary_3d_indices,
        int(np.prod(tracking_mask.shape)),
        mask_count=np.count_nonzero(tracking_mask))

    return dict(
        zip([
            'sensitivity_voxels', 'specificity_voxels', 'precision_voxels',
            'accuracy_voxels', 'dice_voxels', 'kappa_voxels', 'youden_voxels'
        ], voxels_binary))
Example #24
def test_LSCv2(verbose=False):
    xyz1 = np.array([[1, 0, 0], [2, 0, 0], [3, 0, 0]], dtype='float32')
    xyz2 = np.array([[1, 0, 0], [1, 2, 0], [1, 3, 0]], dtype='float32')
    xyz3 = np.array([[1.1, 0, 0], [1, 2, 0], [1, 3, 0]], dtype='float32')
    xyz4 = np.array([[1, 0, 0], [2.1, 0, 0], [3, 0, 0]], dtype='float32')

    xyz5 = np.array([[100, 0, 0], [200, 0, 0], [300, 0, 0]], dtype='float32')
    xyz6 = np.array([[0, 20, 0], [0, 40, 0], [300, 50, 0]], dtype='float32')

    T = [xyz1, xyz2, xyz3, xyz4, xyz5, xyz6]
    pf.local_skeleton_clustering(T, 0.2)

    pf.local_skeleton_clustering_3pts(T, 0.2)

    for i in range(40):
        xyz = np.random.rand(3, 3).astype('f4')
        T.append(xyz)

    from time import time
    t1 = time()
    C3 = pf.local_skeleton_clustering(T, .5)
    t2 = time()
    if verbose:
        print(t2 - t1)
        print(len(C3))

    t1 = time()
    C4 = pf.local_skeleton_clustering_3pts(T, .5)
    t2 = time()
    if verbose:
        print(t2 - t1)
        print(len(C4))

    for c in C3:
        assert_equal(np.sum(C3[c]['hidden'] - C4[c]['hidden']), 0)

    T2 = []
    for i in range(10**4):
        xyz = np.random.rand(10, 3).astype('f4')
        T2.append(xyz)
    t1 = time()
    C5 = pf.local_skeleton_clustering(T2, .5)
    t2 = time()
    if verbose:
        print(t2 - t1)
        print(len(C5))

    fname = get_fnames('fornix')
    fornix = load_tractogram(fname, 'same', bbox_valid_check=False).streamlines

    T3 = set_number_of_points(fornix, 6)

    if verbose:
        print('lenT3', len(T3))

    C = pf.local_skeleton_clustering(T3, 10.)

    if verbose:
        print('lenC', len(C))
    """
Example #25
def compute_streamlines_measures(args):
    bundle_filename, bundle_reference = args[0]
    wb_streamlines = args[1]
    gs_streamlines_indices = args[2]

    if not os.path.isfile(bundle_filename):
        logging.info('{} does not exist'.format(bundle_filename))
        return None

    bundle_sft = load_tractogram(bundle_filename, bundle_reference)
    bundle_sft.to_vox()
    bundle_sft.to_corner()
    bundle_streamlines = bundle_sft.streamlines
    _, bundle_dimensions, _, _ = bundle_sft.space_attributes

    if not bundle_streamlines:
        logging.info('{} is empty'.format(bundle_filename))
        return None

    _, streamlines_indices = perform_streamlines_operation(
        intersection, [wb_streamlines, bundle_streamlines], precision=0)

    streamlines_binary = binary_classification(streamlines_indices,
                                               gs_streamlines_indices,
                                               len(wb_streamlines))

    return dict(
        zip([
            'sensitivity_streamlines', 'specificity_streamlines',
            'precision_streamlines', 'accuracy_streamlines',
            'dice_streamlines', 'kappa_streamlines', 'youden_streamlines'
        ], streamlines_binary))
Example #26
def random_streamline_color():
    np.random.seed(0)
    sft = load_tractogram(filepath_dix['gs.tck'], filepath_dix['gs.nii'])

    uniform_colors_x = np.random.randint(0, 255, (13, 1))
    uniform_colors_y = np.random.randint(0, 255, (13, 1))
    uniform_colors_z = np.random.randint(0, 255, (13, 1))
    uniform_colors_x = np.expand_dims(
        np.repeat(uniform_colors_x, 8, axis=1), axis=-1)
    uniform_colors_y = np.expand_dims(
        np.repeat(uniform_colors_y, 8, axis=1), axis=-1)
    uniform_colors_z = np.expand_dims(
        np.repeat(uniform_colors_z, 8, axis=1), axis=-1)

    coloring_dict = {}
    coloring_dict['color_x'] = uniform_colors_x
    coloring_dict['color_y'] = uniform_colors_y
    coloring_dict['color_z'] = uniform_colors_z

    try:
        sft.data_per_point = coloring_dict
        with InTemporaryDirectory():
            save_tractogram(sft, 'random_streamlines_color.trk')
        return True
    except (TypeError, ValueError):
        return False
Example #27
def remove_invalid_streamlines(resize):
    sft = load_tractogram(filepath_dix['gs.trk'], filepath_dix['gs.nii'])
    if resize:
        sft._dimensions[2] = 5

    sft.remove_invalid_streamlines()
    return len(sft)
Example #28
def get_streamlines():
    from dipy.data import get_fnames
    from dipy.io.streamline import load_tractogram

    fname = get_fnames('fornix')
    fornix = load_tractogram(fname, 'same', bbox_valid_check=False)

    return fornix.streamlines
Example #29
def fib_equal_in_rasmm_space():
    if not have_fury:
        return
    sft = load_tractogram(filepath_dix['gs.fib'], filepath_dix['gs.nii'],
                          to_space=Space.RASMM)
    tmp_points_rasmm = np.loadtxt(filepath_dix['gs_rasmm_space.txt'])
    assert_allclose(tmp_points_rasmm,
                    sft.streamlines.get_data(), atol=1e-3, rtol=1e-6)
Example #30
def fornix_streamlines(no_pts=12):
    fname = get_fnames('fornix')

    fornix = load_tractogram(fname, 'same', bbox_valid_check=False).streamlines

    fornix_streamlines = Streamlines(fornix)
    streamlines = set_number_of_points(fornix_streamlines, no_pts)
    return streamlines
Example #31
    def load_bundles(self,
                     bundle_names,
                     file_path='./',
                     file_suffix='.trk',
                     affine=np.eye(4),
                     bbox_valid_check=False):
        """
        Load tractograms from files.

        Parameters
        ----------
        bundle_names : list of strings
            Names of bundles to load.
        file_path : string, optional.
            Path to load trk files from.
            Default: './'
        file_suffix : string, optional.
            File name will be the bundle name + file_suffix.
            Default: '.trk'
        affine : array_like (4, 4), optional.
            The mapping from the file's reference to this object's reference.
            Default: np.eye(4)
        bbox_valid_check : boolean, optional.
            Whether to verify that the bounding box is valid in voxel space.
            Default: False
        """

        for bundle_name in bundle_names:
            full_path = os.path.join(file_path, f"{bundle_name}{file_suffix}")
            if self.reference == 'same':
                sft = load_tractogram(full_path,
                                      self.reference,
                                      bbox_valid_check=bbox_valid_check)
                self.reference = sft
                self.origin = sft.origin
                self.space = sft.space
            else:
                sft = load_tractogram(full_path,
                                      self.reference,
                                      to_space=self.space,
                                      bbox_valid_check=bbox_valid_check)
            sft = self._apply_affine_sft(sft, affine, self.reference,
                                         self.origin)
            self.add_bundle(bundle_name, sft)
            logging.disable(level=logging.WARNING)
        logging.disable(logging.NOTSET)
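A usage sketch for the method above, assuming bundles is an instance of the enclosing class (not shown here) constructed with reference='same'; the bundle names and path are illustrative.

# Hypothetical instance of the class that defines load_bundles().
bundles.load_bundles(['AF_L', 'CST_R'],
                     file_path='/data/subject01/bundles',
                     bbox_valid_check=False)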