Example #1
def main():

    parser = _build_args_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.in_tractogram])
    assert_outputs_exist(parser, args, args.out_tractogram)

    tractogram_file = load(args.in_tractogram)
    streamlines = list(tractogram_file.streamlines)

    data_per_point = tractogram_file.tractogram.data_per_point
    data_per_streamline = tractogram_file.tractogram.data_per_streamline

    new_streamlines, new_per_point, new_per_streamline = \
        get_subset_streamlines(streamlines,
                               data_per_point,
                               data_per_streamline,
                               args.max_num_streamlines,
                               args.seed)

    new_tractogram = Tractogram(new_streamlines,
                                data_per_point=new_per_point,
                                data_per_streamline=new_per_streamline,
                                affine_to_rasmm=np.eye(4))

    save(new_tractogram, args.out_tractogram, header=tractogram_file.header)
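
The _build_args_parser helper is not shown in this example. A minimal sketch of what it could look like, covering only the arguments that main() reads above (argument names are taken from the code; help strings and defaults are assumptions):

import argparse

def _build_args_parser():
    # Hypothetical reconstruction: only the arguments referenced in
    # main() above are defined; help texts and defaults are assumptions.
    p = argparse.ArgumentParser(
        description='Keep a random subset of streamlines from a tractogram.')
    p.add_argument('in_tractogram', help='Input tractogram file.')
    p.add_argument('out_tractogram', help='Output tractogram file.')
    p.add_argument('max_num_streamlines', type=int,
                   help='Maximum number of streamlines to keep.')
    p.add_argument('--seed', type=int, default=None,
                   help='Random seed for reproducible subsampling.')
    return p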
Example #2
def load_data(path):
    logging.info('Loading streamlines from {0}.'.format(path))
    tractogram = load(path).tractogram
    streamlines = tractogram.streamlines
    data_per_streamline = tractogram.data_per_streamline
    data_per_point = tractogram.data_per_point

    return streamlines, data_per_streamline, data_per_point
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.tractogram_filename])
    assert_outputs_exist(parser, args, [args.seed_density_filename])

    tracts_format = detect_format(args.tractogram_filename)
    if tracts_format is TckFile:
        raise ValueError("Invalid input streamline file format " +
                         "(must be trk): {0}".format(args.tractogram_filename))

    max_ = np.iinfo(np.int16).max
    if args.binary is not None and (args.binary <= 0 or args.binary > max_):
        parser.error(
            'The value of --binary ({}) '
            'must be greater than 0 and smaller or equal to {}'.format(
                args.binary, max_))

    # Load tractogram and load seeds
    tracts_file = load(args.tractogram_filename, args.lazy_load)
    if 'seeds' in tracts_file.tractogram.data_per_streamline:
        seeds = tracts_file.tractogram.data_per_streamline['seeds']
    else:
        parser.error('Tractogram does not contain seeds')

    # Transform seeds if they're all in memory
    if not args.lazy_load:
        seeds = apply_affine(np.linalg.inv(tracts_file.affine), seeds)

    # Create seed density map
    shape = tracts_file.header[Field.DIMENSIONS]
    seed_density = np.zeros(shape, dtype=np.int32)
    for seed in seeds:
        # If seeds are lazily loaded, we have to transform them
        # as they get loaded
        if args.lazy_load:
            seed = apply_affine(np.linalg.inv(tracts_file.affine), seed)

        # Set value at mask, either binary or increment
        seed_voxel = np.round(seed).astype(int)
        if args.binary is not None:
            seed_density[tuple(seed_voxel)] = args.binary
        else:
            seed_density[tuple(seed_voxel)] += 1

    # Save seed density map
    dm_img = Nifti1Image(seed_density.astype(np.int32), tracts_file.affine)
    dm_img.to_filename(args.seed_density_filename)
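
The seed transform above maps world (RAS mm) coordinates back to voxel indices by applying the inverse of the tractogram's affine. A small self-contained sketch of that step, using a hypothetical 2 mm isotropic affine:

import numpy as np
from nibabel.affines import apply_affine

# Hypothetical affine: 2 mm isotropic voxels plus a translation.
affine = np.diag([2.0, 2.0, 2.0, 1.0])
affine[:3, 3] = [-90.0, -126.0, -72.0]

seed_mm = np.array([0.0, 0.0, 0.0])            # a seed in world space
seed_vox = apply_affine(np.linalg.inv(affine), seed_mm)
print(np.round(seed_vox).astype(int))          # -> [45 63 36]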
Example #4
def main():

    parser = _build_args_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.in_tractogram])
    assert_outputs_exist(parser, args, args.out_tractogram)

    tractogram_file = load(args.in_tractogram)
    streamlines = list(tractogram_file.streamlines)

    new_streamlines = resample_streamlines(streamlines,
                                           args.nb_pts_per_streamline,
                                           args.arclength)

    new_tractogram = Tractogram(
        new_streamlines,
        data_per_streamline=tractogram_file.tractogram.data_per_streamline,
        affine_to_rasmm=np.eye(4))

    save(new_tractogram, args.out_tractogram, header=tractogram_file.header)
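
resample_streamlines is a project helper that is not shown here. For reference, the same kind of fixed-count resampling can be done directly with DIPY; this is an illustration, not necessarily what the helper does internally:

import numpy as np
from dipy.tracking.streamline import set_number_of_points

# A toy streamline with three points along the x axis.
streamline = np.array([[0., 0., 0.], [1., 0., 0.], [3., 0., 0.]])
resampled = set_number_of_points([streamline], nb_points=5)
print(resampled[0].shape)  # -> (5, 3)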
Example #5
def main():

    parser = _build_args_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_tractogram)
    assert_outputs_exist(parser, args, args.out_tractogram)

    tractogram_file = load(args.in_tractogram)
    streamlines = list(tractogram_file.streamlines)

    data_per_point = tractogram_file.tractogram.data_per_point
    data_per_streamline = tractogram_file.tractogram.data_per_streamline

    new_streamlines, new_per_point, new_per_streamline = filter_streamlines_by_length(
        streamlines, data_per_point, data_per_streamline, args.minL, args.maxL)

    new_tractogram = Tractogram(new_streamlines,
                                data_per_streamline=new_per_streamline,
                                data_per_point=new_per_point,
                                affine_to_rasmm=np.eye(4))

    save(new_tractogram, args.out_tractogram, header=tractogram_file.header)
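
filter_streamlines_by_length is also a project helper that is not shown. A minimal sketch of length-based filtering using DIPY's length function, returning the kept streamlines and their indices (the metadata handling of the real helper is omitted):

import numpy as np
from dipy.tracking.streamline import length

def filter_by_length_sketch(streamlines, min_length, max_length):
    # Arc length of every streamline, then keep those inside the range.
    lengths = np.asarray(list(length(streamlines)))
    kept = np.flatnonzero((lengths >= min_length) & (lengths <= max_length))
    return [streamlines[i] for i in kept], kept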
Example #6
import requests
import os
import json
from nibabel import Nifti1Image, GiftiImage, streamlines
from skimage import io
import gzip
from getpass import getpass
from io import BytesIO
from zipfile import ZipFile
import urllib
import pandas as pd

DECODERS = {
    ".nii.gz": lambda b: Nifti1Image.from_bytes(gzip.decompress(b)),
    ".nii": lambda b: Nifti1Image.from_bytes(b),
    ".gii": lambda b: GiftiImage.from_bytes(b),
    ".json": lambda b: json.loads(b.decode()),
    ".tck": lambda b: streamlines.load(BytesIO(b)),
    ".csv": lambda b: pd.read_csv(BytesIO(b), delimiter=";"),
    ".tsv": lambda b: pd.read_csv(BytesIO(b), delimiter="\t"),
    ".txt": lambda b: pd.read_csv(BytesIO(b), delimiter=" ", header=None),
    ".zip": lambda b: ZipFile(BytesIO(b)),
    ".png": lambda b: io.imread(BytesIO(b))
}
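
A hypothetical helper showing how DECODERS could be used to pick a decoder by filename suffix (this dispatch function is not part of the original snippet):

def decode_by_suffix(filename, content):
    # Try longer suffixes first so ".nii.gz" wins over ".nii".
    for suffix in sorted(DECODERS, key=len, reverse=True):
        if filename.endswith(suffix):
            return DECODERS[suffix](content)
    raise ValueError('No decoder registered for {0}'.format(filename))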


class SiibraHttpRequestError(Exception):
    def __init__(self, response, msg="Cannot execute http request."):
        self.response = response
        self.msg = msg
        Exception.__init__(self)

    def __str__(self):
        # Body truncated in the source listing; a plausible version based
        # on the attributes set in __init__ (an assumption, not the
        # original code):
        return '{0} Status code: {1}'.format(
            self.msg, self.response.status_code)
Example #7
def main():

    parser = build_args_parser()
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.INFO)

    if os.path.isfile(args.output):
        if args.force:
            logging.info('Overwriting {0}.'.format(args.output))
        else:
            parser.error('{0} already exists! Use -f to overwrite it.'.format(
                args.output))

    # Load all input streamlines.
    data = [load_data(f) for f in args.inputs]
    streamlines, data_per_streamline, data_per_point = zip(*data)
    nb_streamlines = [len(s) for s in streamlines]

    # Apply the requested operation to each input file.
    logging.info('Performing operation \'{}\'.'.format(args.operation))
    new_streamlines, indices = perform_streamlines_operation(
        OPERATIONS[args.operation], streamlines, args.precision)

    # Get the meta data of the streamlines.
    new_data_per_streamline = {}
    new_data_per_point = {}
    if not args.no_data:

        for key in data_per_streamline[0].keys():
            all_data = np.vstack([s[key] for s in data_per_streamline])
            new_data_per_streamline[key] = all_data[indices, :]

        # Add the indices to the metadata if requested.
        if args.save_meta_indices:
            new_data_per_streamline['ids'] = indices

        for key in data_per_point[0].keys():
            all_data = list(chain(*[s[key] for s in data_per_point]))
            new_data_per_point[key] = [all_data[i] for i in indices]

    # Save the indices to a file if requested.
    if args.save_indices is not None:
        start = 0
        indices_dict = {'filenames': args.inputs}
        for name, nb in zip(args.inputs, nb_streamlines):
            end = start + nb
            file_indices = \
                [i - start for i in indices if i >= start and i < end]
            indices_dict[name] = file_indices
            start = end
        with open(args.save_indices, 'wt') as f:
            json.dump(indices_dict, f)

    # Save the new streamlines.
    logging.info('Saving streamlines to {0}.'.format(args.output))
    reference_file = load(args.inputs[0], True)
    new_tractogram = Tractogram(new_streamlines,
                                data_per_streamline=new_data_per_streamline,
                                data_per_point=new_data_per_point)

    # If the reference is a .tck, the affine will be None.
    affine = reference_file.tractogram.affine_to_rasmm
    if affine is None:
        affine = np.eye(4)
    new_tractogram.affine_to_rasmm = affine

    new_header = reference_file.header.copy()
    new_header['nb_streamlines'] = len(new_streamlines)
    save(new_tractogram, args.output, header=new_header)
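
The per-file bookkeeping above splits the global indices of the kept streamlines back into file-local indices. A tiny worked example of that loop, with hypothetical inputs:

inputs = ['a.trk', 'b.trk']
nb_streamlines = [3, 2]       # streamline counts per input file
indices = [0, 2, 3]           # global indices kept by the operation

start = 0
for name, nb in zip(inputs, nb_streamlines):
    end = start + nb
    print(name, [i - start for i in indices if start <= i < end])
    start = end
# a.trk [0, 2]  (global 0 and 2 fall in the first file's range 0..2)
# b.trk [0]     (global 3 is local index 0 in the second file)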
Example #8
def load_tractogram(self):
    tractogram_data = streamlines.load(self.tractogram_path)
    self.tractogram = tractogram_data.streamlines