def main():
    parser = _build_args_parser()
    args = parser.parse_args()
    assert_inputs_exist(parser, [args.tractogram])
    assert_outputs_exist(parser, args, [], [args.save])

    tracts_format = detect_format(args.tractogram)
    if tracts_format is not TrkFile:
        raise ValueError("Invalid input streamline file format " +
                         "(must be trk): {0}".format(args.tractogram_filename))

    # Load files and data
    trk = TrkFile.load(args.tractogram)
    tractogram = trk.tractogram
    streamlines = tractogram.streamlines
    if 'seeds' not in tractogram.data_per_streamline:
        parser.error('Tractogram does not contain seeds')
    seeds = tractogram.data_per_streamline['seeds']

    # Make display objects
    streamlines_actor = actor.line(streamlines)
    points = actor.dots(seeds, color=(1., 1., 1.))

    # Add display objects to canvas
    r = window.Renderer()
    r.add(streamlines_actor)
    r.add(points)

    # Show and record if needed
    if args.save is not None:
        window.record(r, out_path=args.save, size=(1000, 1000))
    window.show(r)
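
The script above assumes _build_args_parser, assert_inputs_exist, and assert_outputs_exist are defined elsewhere in its source file or package; the remaining names would come from roughly these imports (a sketch assuming the fury rendering package, since older scripts exposed the same actor/window API under dipy.viz):

# Likely imports for the snippet above (fury is an assumption; older code
# exposed the same actor/window API under dipy.viz).
from fury import actor, window
from nibabel.streamlines import TrkFile, detect_format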
Example #2
from s3fs import S3FileSystem
from nibabel.streamlines import TrkFile


def read_s3fs_nib(paths, lazy, block_size, bfile="read_file.bench"):
    fs = S3FileSystem()
    fs.invalidate_cache()  # drop any cached listings so each run starts cold

    for path in paths:
        with fs.open(path, "rb", block_size=block_size) as f:
            streamlines = TrkFile.load(f, lazy_load=lazy).streamlines
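
A hypothetical invocation of the reader above; the bucket, key, and block size below are illustrative values, not from the original benchmark:

# Hypothetical call; s3fs addresses objects as "bucket/key".
read_s3fs_nib(["my-bucket/tractograms/bundle.trk"],
              lazy=True,
              block_size=64 * 2 ** 20)  # 64 MiB blocks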
Example #3
def read_trk(f, lazy, bfile="read_file.bench"):
    streamlines = TrkFile.load(f, lazy_load=lazy).streamlines

    if lazy:
        # With lazy_load=True the streamlines are produced on the fly;
        # iterate over them to force the file to actually be read.
        for stream in streamlines:
            continue

        return stream

    return streamlines[-1]
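
For context, a minimal sketch of what lazy_load changes (the file name is hypothetical): with lazy_load=True, nibabel returns a lazily-evaluated tractogram whose streamlines are only read when iterated, which is why the loop above is needed to force the I/O.

from nibabel.streamlines import TrkFile

with open("bundle.trk", "rb") as f:           # hypothetical file
    eager = TrkFile.load(f, lazy_load=False)
    print(type(eager.tractogram).__name__)    # Tractogram: data read up front

with open("bundle.trk", "rb") as f:
    lazy = TrkFile.load(f, lazy_load=True)
    print(type(lazy.tractogram).__name__)     # LazyTractogram: read on iteration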
Example #4
def read_prefetch_nib(paths,
                      lazy,
                      block_size,
                      prefetch_storage,
                      bfile="read_file.bench"):

    # S3PrefetchFileSystem (and the header_bytes option below) comes from a
    # custom prefetching file-system layer, not from stock s3fs.
    fs = S3PrefetchFileSystem()
    fs.invalidate_cache()

    with fs.open(paths, "rb", block_size=block_size, header_bytes=1000) as f:
        streamlines = TrkFile.load(f, lazy_load=lazy).streamlines
Example #5
def read_mem_nib(paths, lazy, bfile="read_file.bench"):
    # Reads the tractograms from the local filesystem via the builtin
    # open(), i.e. without going through S3.
    for path in paths:
        with open(path, "rb") as f:
            streamlines = TrkFile.load(f, lazy_load=lazy).streamlines
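
These readers look like the bodies of an I/O benchmark (each accepts a bfile argument, presumably a results file for the harness). A hedged sketch of how one of them might be timed; the path below is made up:

import time

paths = ["/data/sub-01/bundle.trk"]           # hypothetical local copy
start = time.time()
read_mem_nib(paths, lazy=False)
print("read_mem_nib: %.2f s" % (time.time() - start))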