def bench_load_trk():
    """Benchmark TRK loading: legacy ``tv.read`` vs. ``nib.streamlines.load``.

    Generates a synthetic tractogram (5000 streamlines x 1000 points, with a
    second pass adding 10 per-point scalars), saves it to a temporary TRK
    file, times both readers, prints the speedup, and sanity-checks that both
    readers return the same data.
    """
    rng = np.random.RandomState(42)
    dtype = 'float32'
    NB_STREAMLINES = 5000
    NB_POINTS = 1000
    points = [rng.rand(NB_POINTS, 3).astype(dtype)
              for i in range(NB_STREAMLINES)]
    scalars = [rng.rand(NB_POINTS, 10).astype(dtype)
               for i in range(NB_STREAMLINES)]
    repeat = 10

    with InTemporaryDirectory():
        trk_file = "tmp.trk"
        tractogram = Tractogram(points, affine_to_rasmm=np.eye(4))
        TrkFile(tractogram).save(trk_file)

        # tv.read applies a +0.5 voxel shift; undo it to compare with the
        # new reader's output.
        streamlines_old = [d[0] - 0.5
                           for d in tv.read(trk_file, points_space="rasmm")[0]]
        mtime_old = measure('tv.read(trk_file, points_space="rasmm")', repeat)
        print("Old: Loaded {:,} streamlines in {:6.2f}".format(NB_STREAMLINES,
                                                               mtime_old))

        trk = nib.streamlines.load(trk_file, lazy_load=False)
        streamlines_new = trk.streamlines
        mtime_new = measure('nib.streamlines.load(trk_file, lazy_load=False)',
                            repeat)
        # BUG FIX: was "{:6.2}" (general format, would print e.g. "1.2e+01");
        # use "{:6.2f}" for consistency with the other timing messages.
        print("\nNew: Loaded {:,} streamlines in {:6.2f}".format(NB_STREAMLINES,
                                                                 mtime_new))
        print("Speedup of {:.2f}".format(mtime_old / mtime_new))
        for s1, s2 in zip(streamlines_new, streamlines_old):
            assert_array_equal(s1, s2)

    # Points and scalars
    with InTemporaryDirectory():
        trk_file = "tmp.trk"
        tractogram = Tractogram(points,
                                data_per_point={'scalars': scalars},
                                affine_to_rasmm=np.eye(4))
        TrkFile(tractogram).save(trk_file)

        # PERF FIX: the original called tv.read() twice (once for points,
        # once for scalars); read the file once and slice both out.
        data_old = tv.read(trk_file, points_space="rasmm")[0]
        streamlines_old = [d[0] - 0.5 for d in data_old]
        scalars_old = [d[1] for d in data_old]
        mtime_old = measure('tv.read(trk_file, points_space="rasmm")', repeat)
        msg = "Old: Loaded {:,} streamlines with scalars in {:6.2f}"
        print(msg.format(NB_STREAMLINES, mtime_old))

        trk = nib.streamlines.load(trk_file, lazy_load=False)
        scalars_new = trk.tractogram.data_per_point['scalars']
        mtime_new = measure('nib.streamlines.load(trk_file, lazy_load=False)',
                            repeat)
        msg = "New: Loaded {:,} streamlines with scalars in {:6.2f}"
        print(msg.format(NB_STREAMLINES, mtime_new))
        # BUG FIX: was "{:2f}" (width 2, default 6-digit precision);
        # "{:.2f}" matches the first section's speedup message.
        print("Speedup of {:.2f}".format(mtime_old / mtime_new))
        for s1, s2 in zip(scalars_new, scalars_old):
            assert_array_equal(s1, s2)
def main(): parser = _build_args_parser() args = parser.parse_args() assert_inputs_exist(parser, [args.tractogram]) assert_outputs_exist(parser, args, [], [args.save]) tracts_format = detect_format(args.tractogram) if tracts_format is not TrkFile: raise ValueError("Invalid input streamline file format " + "(must be trk): {0}".format(args.tractogram_filename)) # Load files and data trk = TrkFile.load(args.tractogram) tractogram = trk.tractogram streamlines = tractogram.streamlines if 'seeds' not in tractogram.data_per_streamline: parser.error('Tractogram does not contain seeds') seeds = tractogram.data_per_streamline['seeds'] # Make display objects streamlines_actor = actor.line(streamlines) points = actor.dots(seeds, color=(1., 1., 1.)) # Add display objects to canvas r = window.Renderer() r.add(streamlines_actor) r.add(points) # Show and record if needed if args.save is not None: window.record(r, out_path=args.save, size=(1000, 1000)) window.show(r)
def read_s3fs_nib(paths, lazy, block_size, bfile="read_file.bench"): fs = S3FileSystem() fs.invalidate_cache() for path in paths: with fs.open(path, "rb", block_size=block_size) as f: streamlines = TrkFile.load(f, lazy_load=lazy).streamlines
def read_trk(f, lazy, bfile="read_file.bench"): streamlines = TrkFile.load(f, lazy_load=lazy).streamlines if lazy: for stream in streamlines: continue return stream return streamlines[-1]
def read_prefetch_nib(paths, lazy, block_size, prefetch_storage, bfile="read_file.bench"): fs = S3PrefetchFileSystem() fs.invalidate_cache() with fs.open(paths, "rb", block_size=block_size, header_bytes=1000) as f: streamlines = TrkFile.load(f, lazy_load=lazy).streamlines
def read_mem_nib(paths, lazy, bfile="read_file.bench"): for path in paths: with open(path, "rb") as f: streamlines = TrkFile.load(f, lazy_load=lazy).streamlines