swap_end_count[rh_lh] += 1
                            end_count[inv_rh_lh] += 1
                            break
                        j += 1
            new_lines += [new_line]
            i += 1
stdout.write("\n done")
stdout.flush()

# Persist the intersection results as .npy arrays.
# NOTE(review): id_list / id_surf are built earlier in this script (outside
# this chunk) — presumably streamline ids and surface vertex indices; verify.
print "save intersection"
np.save(args.intersection, id_list)
np.save(args.surf_idx_inter, id_surf)

# Save the processed streamlines in VTK .fib format.
print "save as .fib"
lines_polydata = lines_to_vtk_polydata(new_lines, None, np.float32)
save_polydata(lines_polydata, args.output , True)

print "Do something with intersection !"
# Report
# Report
# Streamlines that started and were not rejected at either end nor by PFT,
# split per hemisphere: [right, left].
valid_count = [start_count[0] - bad_start_count[0] - bad_end_count[0] - bad_pft_count[0],
               start_count[1] - bad_start_count[1] - bad_end_count[1] - bad_pft_count[1]]

if args.report is not None:
    # FIX: use a context manager — the original handle was opened with
    # open() and never closed, so the report could be left unflushed.
    with open(args.report, "w") as report:
        report.write("# Report : [ right , left ]\n")
        report.write("start_count : " + str(start_count) + "\n")
        report.write("bad_pft_count : " + str(bad_pft_count) + "\n")
        report.write("bad_start_count : " + str(bad_start_count) + "\n")
        report.write("end_count : " + str(end_count) + "\n")
        report.write("bad_end_count : " + str(bad_end_count) + "\n")
        # FIX: valid_count was computed above but never written anywhere;
        # include it so the report contains the summary it was built for.
        report.write("valid_count : " + str(valid_count) + "\n")
# Example #2
# 0
#!/usr/bin/env python

# [email protected]
import argparse
import numpy as np

from trimeshpy.trimeshflow_vtk import lines_to_vtk_polydata
from trimeshpy.trimesh_vtk import save_polydata, load_streamlines_poyldata, get_streamlines

# Command-line interface: one or more input .fib files plus an output path.
# NOTE(review): description says 'lenghts stats' (sic) but this script
# actually merges tractography files — confirm intended description.
parser = argparse.ArgumentParser(description='lenghts stats')
parser.add_argument('fibers', type=str, nargs='+', default=None, help='tractography fibers (.fib)')
parser.add_argument('-o', type=str, default=None, help='merged tractography (.fib)')


args = parser.parse_args()

streamlines_list = []
for filename in args.fibers: 
    streamlines_list.append(get_streamlines(load_streamlines_poyldata(filename)))
    print filename, len(streamlines_list[-1])

final_streamlines = []
for streamlines in streamlines_list:
    for line in streamlines:
        final_streamlines.append(line)
         
print args.out, len(final_streamlines)
         
lines_polydata = lines_to_vtk_polydata(final_streamlines, None, np.float32)
save_polydata(lines_polydata, args.o , True)
def main():
    """Run PFT (particle-filter) tractography from precomputed surface seeds.

    Reads everything from the command line (see buildArgsParser): a
    spherical-harmonics field, include/exclude partial-volume maps, and
    seed points/directions as .npy arrays. Streamlines are generated in
    voxel space, mapped back to world space, and saved as a VTK .fib file.
    """
    parser = buildArgsParser()
    args = parser.parse_args()
    param = {}

    # Default PFT aperture when neither an angle nor a curvature is given.
    if args.pft_theta is None and args.pft_curvature is None:
        args.pft_theta = 20

    # Default seeding density: 1 seed per voxel if nothing was requested.
    if not np.any([args.nt, args.npv, args.ns]):
        args.npv = 1

    # Tracking cone half-angle (radians): explicit theta wins, then a
    # curvature-derived angle, then an algorithm-specific default.
    if args.theta is not None:
        theta = gm.math.radians(args.theta)
    elif args.curvature > 0:
        theta = get_max_angle_from_curvature(args.curvature, args.step_size)
    elif args.algo == 'prob':
        theta = gm.math.radians(20)
    else:
        theta = gm.math.radians(45)

    # Same resolution order for the PFT tracker's own aperture.
    if args.pft_curvature is not None:
        pft_theta = get_max_angle_from_curvature(args.pft_curvature, args.step_size)
    else:
        pft_theta = gm.math.radians(args.pft_theta)

    # Map the short CLI codes to interpolation names; anything else aborts.
    if args.mask_interp == 'nn':
        mask_interpolation = 'nearest'
    elif args.mask_interp == 'tl':
        mask_interpolation = 'trilinear'
    else:
        parser.error("--mask_interp has wrong value. See the help (-h).")
        return

    if args.field_interp == 'nn':
        field_interpolation = 'nearest'
    elif args.field_interp == 'tl':
        field_interpolation = 'trilinear'
    else:
        parser.error("--sh_interp has wrong value. See the help (-h).")
        return

    # Collect all tracking parameters in a single dict passed to
    # generate_streamline below.
    param['random'] = args.random
    param['skip'] = args.skip
    param['algo'] = args.algo
    param['mask_interp'] = mask_interpolation
    param['field_interp'] = field_interpolation
    param['theta'] = theta
    param['sf_threshold'] = args.sf_threshold
    # PFT threshold falls back to the tracking threshold when not given.
    param['pft_sf_threshold'] = args.pft_sf_threshold if args.pft_sf_threshold is not None else args.sf_threshold
    param['sf_threshold_init'] = args.sf_threshold_init
    param['step_size'] = args.step_size
    param['max_length'] = args.max_length
    param['min_length'] = args.min_length
    param['is_single_direction'] = args.is_single_direction
    param['nbr_seeds'] = args.nt if args.nt is not None else 0
    param['nbr_seeds_voxel'] = args.npv if args.npv is not None else 0
    param['nbr_streamlines'] = args.ns if args.ns is not None else 0
    # Convert the max no-direction distance (mm) into a number of steps.
    param['max_no_dir'] = int(math.ceil(args.maxL_no_dir / param['step_size']))
    param['is_all'] = args.is_all
    param['is_act'] = args.is_act
    param['theta_pft'] = pft_theta
    # Disabling PFT zeroes the particle/backtracking budget entirely.
    if args.not_is_pft:
        param['nbr_particles'] = 0
        param['back_tracking'] = 0
        param['front_tracking'] = 0
    else:
        param['nbr_particles'] = args.nbr_particles
        # back/front tracking distances (mm) converted to step counts.
        param['back_tracking'] = int(
            math.ceil(args.back_tracking / args.step_size))
        param['front_tracking'] = int(
            math.ceil(args.front_tracking / args.step_size))
    param['nbr_iter'] = param['back_tracking'] + param['front_tracking']
    # mmap the data unless the user asked for an in-memory load.
    param['mmap_mode'] = None if args.isLoadData else 'r'

    if args.isVerbose:
        logging.basicConfig(level=logging.DEBUG)

    logging.debug('Tractography parameters:\n{0}'.format(param))

    # Refuse to clobber an existing output unless -f was passed.
    if os.path.isfile(args.output_file):
        if args.isForce:
            logging.info('Overwriting "{0}".'.format(args.output_file))
        else:
            parser.error(
                '"{0}" already exists! Use -f to overwrite it.'
                .format(args.output_file))

    # Stopping criterion built from the include/exclude PV maps:
    # ACT or CMC depending on --is_act.
    include_dataset = Dataset(
        nib.load(args.map_include_file), param['mask_interp'])
    exclude_dataset = Dataset(
        nib.load(args.map_exclude_file), param['mask_interp'])
    if param['is_act']:
        mask = ACT(include_dataset, exclude_dataset,
                   param['step_size'] / include_dataset.size[0])
    else:
        mask = CMC(include_dataset, exclude_dataset,
                   param['step_size'] / include_dataset.size[0])

    # Spherical-harmonics field and the main tracker (det or prob).
    dataset = Dataset(nib.load(args.sh_file), param['field_interp'])
    field = SphericalHarmonicField(
        dataset, args.basis, param['sf_threshold'],
        param['sf_threshold_init'], param['theta'])

    if args.algo == 'det':
        tracker = deterministicMaximaTracker(field, param['step_size'])
    elif args.algo == 'prob':
        tracker = probabilisticTracker(field, param['step_size'])
    else:
        parser.error("--algo has wrong value. See the help (-h).")
        return

    # PFT always uses a probabilistic tracker on its own (wider) field.
    pft_field = SphericalHarmonicField(
        dataset, args.basis, param['pft_sf_threshold'],
        param['sf_threshold_init'], param['theta_pft'])

    pft_tracker = probabilisticTracker(pft_field, param['step_size'])

    # ADD Seed input
    # modify ESO
    # NOTE(review): get_affine()/get_header() are the old nibabel API
    # (now img.affine / img.header) — works on older nibabel only.
    nib_mask = nib.load(args.map_include_file)
    seed_points = np.load(args.seed_points)
    seed_dirs = np.load(args.seed_dir)
    rotation = nib_mask.get_affine()[:3,:3]
    inv_rotation = np.linalg.inv(rotation)
    translation = nib_mask.get_affine()[:3,3]
    scale = np.array(nib_mask.get_header().get_zooms())
    voxel_space = nib.aff2axcodes(nib_mask.get_affine())

    print voxel_space
    # seed points transfo
    # LPS -> voxel_space
    # Flip each axis whose code differs from the assumed LPS input;
    # assumes seed_points come in LPS world coordinates — TODO confirm.
    print scale
    if voxel_space[0] != 'L':
        print "flip X"
        seed_points[:,0] = -seed_points[:,0]
    if voxel_space[1] != 'P':
        print "flip Y"
        seed_points[:,1] = -seed_points[:,1]
    if voxel_space[2] != 'S':
        print "flip Z"
        seed_points[:,2] = -seed_points[:,2]

    # other transfo
    # World -> voxel grid: undo translation and rotation, then apply zooms.
    seed_points = seed_points - translation
    seed_points = seed_points.dot(inv_rotation)
    seed_points = seed_points * scale

    # seed dir transfo
    # Directions get the X/Y sign flip plus rotation/scale, no translation.
    seed_dirs[:,0:2] = -seed_dirs[:,0:2]
    seed_dirs = seed_dirs.dot(inv_rotation)
    seed_dirs = seed_dirs * scale

    if args.inv_seed_dir:
        seed_dirs = seed_dirs * -1.0

    # Compute tractography
    # --test caps the number of seeds for quick runs.
    nb_seeds = len(seed_dirs)
    if args.test is not None and args.test < nb_seeds:
        nb_seeds = args.test
    # end modify ESO


    # tracker to modify
    # modify ESO
    # One streamline per seed, with a simple percentage progress display.
    start = time.time()
    streamlines = []
    for i in range(nb_seeds):
        s = generate_streamline(tracker, mask, seed_points[i], seed_dirs[i], pft_tracker=pft_tracker, param=param)
        streamlines.append(s)
        stdout.write("\r %d%%" % (i*101//nb_seeds))
        stdout.flush()

    stdout.write("\n done")
    stdout.flush()
    stop = time.time()
    # end modify ESO


    # ADD save fiber output
    # modify ESO
    # Inverse of the seed-point transform: voxel grid back to world space.
    for i in range(len(streamlines)):
        streamlines[i] = streamlines[i] / scale
        streamlines[i] = streamlines[i].dot(rotation)
        streamlines[i] = streamlines[i] + translation
        # voxel_space -> LPS
        if voxel_space[0] != 'L':
            streamlines[i][:,0] = -streamlines[i][:,0]
        if voxel_space[1] != 'P':
            streamlines[i][:,1] = -streamlines[i][:,1]
        if voxel_space[2] != 'S':
            streamlines[i][:,2] = -streamlines[i][:,2]

    lines_polydata = lines_to_vtk_polydata(streamlines, None, np.float32)
    save_polydata(lines_polydata, args.output_file , True)
    # end modify ESO

    # Summary: average streamline length (in mm, points * step size).
    lengths = [len(s) for s in streamlines]
    if nb_seeds > 0:
        ave_length = (sum(lengths) / nb_seeds) * param['step_size']
    else:
        ave_length = 0

    str_ave_length = "%.2f" % ave_length
    str_time = "%.2f" % (stop - start)
    print(str(nb_seeds) + " streamlines, with an average length of " +
          str_ave_length + " mm, done in " + str_time + " seconds.")
def main():
    """Run (non-PFT) tractography from precomputed surface seeds.

    Same pipeline as the PFT variant but with a plain binary mask as the
    stopping criterion and no particle-filter tracker: load SH field and
    mask, transform seeds from world to voxel space, track one streamline
    per seed, transform back, and save as a VTK .fib file.
    """
    np.random.seed(int(time.time()))
    parser = buildArgsParser()
    args = parser.parse_args()

    param = {}

    # Validate the closed-set CLI options up front.
    if args.algo not in ["det", "prob"]:
        parser.error("--algo has wrong value. See the help (-h).")

    if args.basis not in ["mrtrix", "dipy", "fibernav"]:
        parser.error("--basis has wrong value. See the help (-h).")

    #if np.all([args.nt is None, args.npv is None, args.ns is None]):
    #    args.npv = 1

    # Tracking cone half-angle (radians): explicit theta wins, then a
    # curvature-derived angle, then an algorithm-specific default.
    if args.theta is not None:
        theta = gm.math.radians(args.theta)
    elif args.curvature > 0:
        theta = get_max_angle_from_curvature(args.curvature, args.step_size)
    elif args.algo == 'prob':
        theta = gm.math.radians(20)
    else:
        theta = gm.math.radians(45)

    # Map the short CLI codes to interpolation names; anything else aborts.
    if args.mask_interp == 'nn':
        mask_interpolation = 'nearest'
    elif args.mask_interp == 'tl':
        mask_interpolation = 'trilinear'
    else:
        parser.error("--mask_interp has wrong value. See the help (-h).")
        return

    if args.field_interp == 'nn':
        field_interpolation = 'nearest'
    elif args.field_interp == 'tl':
        field_interpolation = 'trilinear'
    else:
        parser.error("--sh_interp has wrong value. See the help (-h).")
        return

    # Tracking parameters handed to generate_streamline. Seed counts are
    # zeroed here because seeding comes from the .npy files below.
    param['algo'] = args.algo
    param['mask_interp'] = mask_interpolation
    param['field_interp'] = field_interpolation
    param['theta'] = theta
    param['sf_threshold'] = args.sf_threshold
    param['sf_threshold_init'] = args.sf_threshold_init
    param['step_size'] = args.step_size
    param['max_length'] = args.max_length
    param['min_length'] = args.min_length
    param['is_single_direction'] = False
    param['nbr_seeds'] = 0
    param['nbr_seeds_voxel'] = 0
    param['nbr_streamlines'] = 0
    # Convert the max no-direction distance (mm) into a number of steps.
    param['max_no_dir'] = int(math.ceil(args.maxL_no_dir / param['step_size']))
    param['is_all'] = False
    param['isVerbose'] = args.isVerbose

    if param['isVerbose']:
        logging.basicConfig(level=logging.DEBUG)

    if param['isVerbose']:
        logging.info('Tractography parameters:\n{0}'.format(param))

    # Refuse to clobber an existing output unless -f was passed.
    if os.path.isfile(args.output_file):
        if args.isForce:
            logging.info('Overwriting "{0}".'.format(args.output_file))
        else:
            parser.error(
                '"{0}" already exists! Use -f to overwrite it.'
                .format(args.output_file))

    # Binary stopping mask (instead of ACT/CMC in the PFT variant).
    nib_mask = nib.load(args.mask_file)
    mask = BinaryMask(
        Dataset(nib_mask, param['mask_interp']))

    # Spherical-harmonics field and the main tracker (det or prob).
    dataset = Dataset(nib.load(args.sh_file), param['field_interp'])
    field = SphericalHarmonicField(
        dataset, args.basis, param['sf_threshold'], param['sf_threshold_init'], param['theta'])

    if args.algo == 'det':
        tracker = deterministicMaximaTracker(field, param['step_size'])
    elif args.algo == 'prob':
        tracker = probabilisticTracker(field, param['step_size'])
    else:
        parser.error("--algo has wrong value. See the help (-h).")
        return

    start = time.time()

    # Etienne St-Onge
    #load and transfo *** todo test with rotation and scaling
    # NOTE(review): get_affine()/get_header() are the old nibabel API
    # (now img.affine / img.header) — works on older nibabel only.
    seed_points = np.load(args.seed_points)
    seed_dirs = np.load(args.seed_dir)
    rotation = nib_mask.get_affine()[:3,:3]
    inv_rotation = np.linalg.inv(rotation)
    translation = nib_mask.get_affine()[:3,3]
    scale = np.array(nib_mask.get_header().get_zooms())
    voxel_space = nib.aff2axcodes(nib_mask.get_affine())

    print voxel_space
    # seed points transfo
    # LPS -> voxel_space
    # Flip each axis whose code differs from the assumed LPS input;
    # assumes seed_points come in LPS world coordinates — TODO confirm.
    if voxel_space[0] != 'L':
        print "flip X"
        seed_points[:,0] = -seed_points[:,0]
    if voxel_space[1] != 'P':
        print "flip Y"
        seed_points[:,1] = -seed_points[:,1]
    if voxel_space[2] != 'S':
        print "flip Z"
        seed_points[:,2] = -seed_points[:,2]

    # other transfo
    # World -> voxel grid: undo translation and rotation, then apply zooms.
    seed_points = seed_points - translation
    seed_points = seed_points.dot(inv_rotation)
    seed_points = seed_points * scale

    # seed dir transfo
    # Directions get the X/Y sign flip plus rotation/scale, no translation.
    seed_dirs[:,0:2] = -seed_dirs[:,0:2]
    seed_dirs = seed_dirs.dot(inv_rotation)
    seed_dirs = seed_dirs * scale

    if args.inv_seed_dir:
        seed_dirs = seed_dirs * -1.0

    # Compute tractography
    # --test caps the number of seeds for quick runs.
    nb_seeds = len(seed_dirs)
    if args.test is not None and args.test < nb_seeds:
        nb_seeds = args.test

    print args.algo," nb seeds: ", nb_seeds

    # One streamline per seed (no PFT tracker here), with a simple
    # percentage progress display.
    streamlines = []
    for i in range(nb_seeds):
        s = generate_streamline(tracker, mask, seed_points[i], seed_dirs[i], pft_tracker=None, param=param)
        streamlines.append(s)

        stdout.write("\r %d%%" % (i*101//nb_seeds))
        stdout.flush()
    stdout.write("\n done")
    stdout.flush()

    # transform back
    # Inverse of the seed-point transform: voxel grid back to world space.
    for i in range(len(streamlines)):
        streamlines[i] = streamlines[i] / scale
        streamlines[i] = streamlines[i].dot(rotation)
        streamlines[i] = streamlines[i] + translation
        # voxel_space -> LPS
        if voxel_space[0] != 'L':
            streamlines[i][:,0] = -streamlines[i][:,0]
        if voxel_space[1] != 'P':
            streamlines[i][:,1] = -streamlines[i][:,1]
        if voxel_space[2] != 'S':
            streamlines[i][:,2] = -streamlines[i][:,2]

    lines_polydata = lines_to_vtk_polydata(streamlines, None, np.float32)
    save_polydata(lines_polydata, args.output_file , True)

    # Summary: average streamline length (in mm, points * step size).
    lengths = [len(s) for s in streamlines]
    if nb_seeds > 0:
        ave_length = (sum(lengths) / nb_seeds) * param['step_size']
    else:
        ave_length = 0

    str_ave_length = "%.2f" % ave_length
    str_time = "%.2f" % (time.time() - start)
    print(str(nb_seeds) + " streamlines, with an average length of " +
          str_ave_length + " mm, done in " + str_time + " seconds.")
# Example #5
# 0
#tri_mesh_flow.display_vertices_flow()
mesh.set_vertices(lines[99])
#test = mesh.edge_triangle_normal_angle().max(1).toarray().squeeze()
#test = mesh.vertices_gaussian_curvature(False)
test = mesh.vertices_cotan_curvature(False)
print "min =", test.min(), "max =", test.max()
print test
color = np.zeros_like(lines[0])
tmax = test.max()
tmin = -test.min()
color[:,0] = np.maximum(test,0).reshape((1,-1))*255/tmax
color[:,2] = np.maximum(-test,0).reshape((1,-1))*255/tmin
mesh.set_colors(color)
mesh.display()
"""
"""
line_to_save = np.swapaxes(lines, 0, 1)
rend = fvtk.ren()
fvtk.add(rend, mesh.get_vtk_actor())
fvtk.add(rend, fvtk.line(line_to_save))
fvtk.show(rend)
"""
"""
# save fibers in .fib normal
#line_to_save = streamline.compress_streamlines(np.swapaxes(lines, 0, 1))
line_to_save = np.swapaxes(lines, 0, 1)
lines_polydata = lines_to_vtk_polydata(line_to_save, None, np.float32)
save_polydata(lines_polydata, saved_fib, True)


"""
# Example #6
# 0
from trimeshpy.trimesh_vtk import load_polydata, save_polydata, load_streamlines_poyldata, get_streamlines
from trimeshpy.trimeshflow_vtk import lines_to_vtk_polydata
import numpy as np

# Input tractography file and the converted output path.
fib_file_name = "../data/tract.fib"
save_file = "../data/tract.xml"
#save_file = "../data/tract.stl"


# Load the .fib polydata and re-save it; save_polydata presumably picks
# the writer from save_file's extension — verify in trimeshpy.
polydata = load_polydata(fib_file_name)
save_polydata(polydata, save_file)


### load streamlines list and save as a new smaller file
"""lines = get_streamlines(load_streamlines_poyldata(fib_file_name))
new_lines = lines[0:1000]
lines_polydata = lines_to_vtk_polydata(new_lines, None, np.float32)
save_polydata(lines_polydata, "../data/tract2.fib")"""