Example 1
def main():
    parser = _build_args_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.tracts, args.ref_anat])
    assert_outputs_exists(parser, args, [args.out])
    check_tracts_support(parser, args.tracts, args.tracts_producer)

    max_ = np.iinfo(np.int16).max
    if args.binary is not None and (args.binary <= 0 or args.binary > max_):
        parser.error(
            'The value of --binary ({}) must be greater than 0 and smaller '
            'than or equal to {}'.format(args.binary, max_))

    streamlines = list(
        load_tracts_over_grid(args.tracts,
                              args.ref_anat,
                              start_at_corner=True,
                              tract_producer=args.tracts_producer))

    # Compute weighting matrix taking the compression into account
    ref_img = nb.load(args.ref_anat)
    anat_dim = ref_img.header.get_data_shape()
    tract_counts = compute_robust_tract_counts_map(streamlines, anat_dim)

    if args.binary is not None:
        tract_counts[tract_counts > 0] = args.binary

    bin_img = nb.Nifti1Image(tract_counts.astype(np.int16),
                             ref_img.affine)
    nb.save(bin_img, args.out)
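For context, here is a minimal sketch of what a tract-count (TDI) map contains. This naive version, written for illustration only, floors each point to a voxel and counts each streamline at most once per voxel; the actual compute_robust_tract_counts_map is more robust, walking each segment so that compressed streamlines (few points, long straight segments) still contribute to every voxel they traverse:

import numpy as np

def naive_tract_counts_map(streamlines, shape):
    # Count, for each voxel, how many streamlines have a point inside it.
    counts = np.zeros(shape, dtype=np.int32)
    bounds = np.asarray(shape)
    for streamline in streamlines:
        vox = np.unique(streamline.astype(int), axis=0)  # one hit per streamline
        vox = vox[np.all((vox >= 0) & (vox < bounds), axis=1)]
        counts[vox[:, 0], vox[:, 1], vox[:, 2]] += 1
    return counts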
Example 2
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(
        parser, [args.bundle, args.centroid_streamline, args.reference])
    assert_outputs_exists(parser, args, [args.output_map])

    bundle_tractogram_file = nib.streamlines.load(args.bundle)
    centroid_tractogram_file = nib.streamlines.load(args.centroid_streamline)
    if int(bundle_tractogram_file.header['nb_streamlines']) == 0:
        logger.warning('Empty bundle file {}. Skipping'.format(args.bundle))
        return

    if int(centroid_tractogram_file.header['nb_streamlines']) != 1:
        logger.warning('Centroid file {} should contain one streamline. '
                       'Skipping'.format(args.centroid_streamline))
        return

    ref_img = nib.load(args.reference)
    bundle_streamlines_vox = load_in_voxel_space(bundle_tractogram_file,
                                                 ref_img)
    # Scale point coordinates into the upsampled voxel grid
    # (in place, through the private ._data point buffer)
    bundle_streamlines_vox._data *= args.upsample

    number_of_centroid_points = len(centroid_tractogram_file.streamlines[0])
    if number_of_centroid_points > 99:
        raise Exception('Invalid number of points in the centroid. You should '
                        'have a maximum of 99 points in your centroid '
                        'streamline. '
                        'Current is {}'.format(number_of_centroid_points))

    centroid_streamlines_vox = load_in_voxel_space(centroid_tractogram_file,
                                                   ref_img)
    centroid_streamlines_vox._data *= args.upsample

    upsampled_shape = [s * args.upsample for s in ref_img.shape]
    tdi_mask = compute_robust_tract_counts_map(bundle_streamlines_vox,
                                               upsampled_shape) > 0

    tdi_mask_nzr = np.nonzero(tdi_mask)
    tdi_mask_nzr_ind = np.transpose(tdi_mask_nzr)

    min_dist_ind, _ = min_dist_to_centroid(tdi_mask_nzr_ind,
                                           centroid_streamlines_vox[0])

    # Save the (upscaled) labels mask
    labels_mask = np.zeros(tdi_mask.shape)
    labels_mask[tdi_mask_nzr] = min_dist_ind + 1  # 0 is background value
    # Copy so the image's cached affine is not modified in place
    rescaled_affine = ref_img.affine.copy()
    rescaled_affine[:3, :3] /= args.upsample
    labels_img = nib.Nifti1Image(labels_mask, rescaled_affine)
    upsampled_spacing = ref_img.header['pixdim'][1:4] / args.upsample
    labels_img.header.set_zooms(upsampled_spacing)
    nib.save(labels_img, args.output_map)
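The min_dist_to_centroid call above assigns every nonzero voxel of the bundle's density mask to its closest centroid point, which is what produces the per-section labels. A plausible sketch of that lookup, as a hypothetical reimplementation with a SciPy KD-tree (the real scilpy helper may be implemented differently):

from scipy.spatial import cKDTree

def min_dist_to_centroid(points, centroid_points):
    # For each query point, return the index of the nearest centroid
    # point and the distance to it.
    dists, indices = cKDTree(centroid_points).query(points, k=1)
    return indices, dists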
Example 3
def main():
    parser = buildArgsParser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.tracts, args.ref_anat])
    assert_outputs_exists(parser, args, [args.out_file_name])
    check_tracts_support(parser, args.tracts, args.tracts_producer)

    streamlines = list(
        load_tracts_over_grid(args.tracts,
                              args.ref_anat,
                              start_at_corner=True,
                              tract_producer=args.tracts_producer))

    # Compute weighting matrix taking the compression into account
    ref_img = nb.load(args.ref_anat)
    anat_dim = ref_img.header.get_data_shape()
    tract_counts_map = compute_robust_tract_counts_map(streamlines, anat_dim)

    # Volume in mm^3 = number of traversed voxels * volume of a single voxel
    nb_voxels = np.count_nonzero(tract_counts_map)
    voxel_volume = np.prod(ref_img.header.get_zooms())
    mm_volume = nb_voxels * voxel_volume

    # Mean density
    weights = np.copy(tract_counts_map)
    weights[weights > 0] = 1
    mean_density = np.average(tract_counts_map, weights=weights)

    tract_count = get_tract_count(streamlines)

    stats_names = ['tract_count', 'tract_volume', 'tract_mean_density']
    means = [tract_count, mm_volume, mean_density]

    # Format the output.
    if args.out_style == 'tabular':
        formatted_out = format_stats_tabular(stats_names,
                                             means,
                                             stddevs=None,
                                             write_header=args.header)
    elif args.out_style == 'csv':
        formatted_out = format_stats_csv(stats_names,
                                         means,
                                         stddevs=None,
                                         write_header=args.header)

    if args.out_file_name is None:
        print(formatted_out)
    else:
        with open(args.out_file_name, 'w') as out_file:
            out_file.write(formatted_out)
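A note on the mean-density step above: passing a binary mask as weights to np.average restricts the mean to the traversed voxels only. A tiny worked example:

import numpy as np

counts = np.array([0, 0, 3, 1])          # tract counts per voxel
mask = (counts > 0).astype(float)
print(np.average(counts, weights=mask))  # (3 + 1) / 2 == 2.0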
Example 4
def get_metrics_stats_over_streamlines_robust(streamlines,
                                              metrics_files,
                                              density_weighting=True):
    """
    Returns the mean value of each metric, only considering voxels that
    are crossed by streamlines. The mean values are weighted by the number of
    streamlines crossing a voxel by default. If false, every voxel traversed
    by a streamline has the same weight.

    Parameters
    ------------
    streamlines : sequence
        sequence of T streamlines. One streamline is an ndarray of shape
        (N, 3), where N is the number of points in that streamline, and
        ``streamlines[t][n]`` is the n-th point in the t-th streamline. Points
        are of form x, y, z in voxmm coordinates.

    metrics_files : sequence
        list of nibabel objects representing the metrics files

    density_weighting : bool
        if True, weight the mean by the density of streamlines going
        through each voxel

    Returns
    ---------
    stats : list
        list of tuples where the first element of the tuple is the mean
        of a metric, and the second element is the standard deviation

    """

    # Compute weighting matrix taking the possible compression into account
    anat_dim = metrics_files[0].header.get_data_shape()
    weights = compute_robust_tract_counts_map(streamlines, anat_dim)

    if not density_weighting:
        weights = weights > 0

    # Use a list comprehension so the result is a list, as documented;
    # map() would return a lazy iterator under Python 3.
    return [weighted_mean_stddev(weights, metric_file.get_fdata())
            for metric_file in metrics_files]
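A minimal usage sketch for the function above, assuming weighted_mean_stddev returns a (mean, stddev) pair as the docstring describes; the metric file names are purely illustrative:

import nibabel as nib

metrics_files = [nib.load('fa.nii.gz'), nib.load('md.nii.gz')]  # hypothetical inputs
stats = get_metrics_stats_over_streamlines_robust(streamlines,
                                                  metrics_files,
                                                  density_weighting=True)
for metric_file, (mean, stddev) in zip(metrics_files, stats):
    print(metric_file.get_filename(), mean, stddev)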
Example 5
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser,
                        [args.bundle, args.label_map, args.distance_map] +
                        args.metrics)

    bundle_tractogram_file = nib.streamlines.load(args.bundle)

    stats = {}
    bundle_name, _ = os.path.splitext(os.path.basename(args.bundle))
    if len(bundle_tractogram_file.streamlines) == 0:
        stats[bundle_name] = None
        print(json.dumps(stats, indent=args.indent, sort_keys=args.sort_keys))
        return

    metrics = [nib.load(m) for m in args.metrics]
    assert_same_resolution(*metrics)
    streamlines_vox = load_in_voxel_space(bundle_tractogram_file, metrics[0])

    if args.density_weighting:
        track_count = compute_robust_tract_counts_map(
            streamlines_vox, metrics[0].shape).astype(np.float64)
    else:
        track_count = np.ones(metrics[0].shape)

    label_file = np.load(args.label_map)
    labels = label_file['arr_0']

    distance_file = np.load(args.distance_map)
    distances_to_centroid_streamline = distance_file['arr_0']
    # Bigger weight near the centroid streamline
    distances_to_centroid_streamline = 1.0 / distances_to_centroid_streamline

    if len(labels) != len(distances_to_centroid_streamline):
        raise Exception('Label map doesn\'t contain the same number of '
                        'entries as the distance map. {} != {}'.format(
                            len(labels),
                            len(distances_to_centroid_streamline)))

    bundle_data_int = streamlines_vox.data.astype(int)
    stats[bundle_name] = {}
    for metric in metrics:
        metric_data = metric.get_fdata()
        current_metric_fname, _ = split_name_with_nii(
            os.path.basename(metric.get_filename()))
        stats[bundle_name][current_metric_fname] = {}

        for i in np.unique(labels):
            number_key = '{:02}'.format(i)
            label_stats = {}
            stats[bundle_name][current_metric_fname][number_key] = label_stats

            label_indices = bundle_data_int[labels == i]
            label_metric = metric_data[label_indices[:, 0],
                                       label_indices[:, 1],
                                       label_indices[:, 2]]
            track_weight = track_count[label_indices[:, 0],
                                       label_indices[:, 1],
                                       label_indices[:, 2]]
            label_weight = track_weight
            if args.distance_weighting:
                label_weight *= distances_to_centroid_streamline[labels == i]
            if np.sum(label_weight) == 0:
                logger.warning('Weights sum to zero, can\'t be normalized. '
                               'Disabling weighting')
                label_weight = None

            label_mean = np.average(label_metric, weights=label_weight)
            label_std = np.sqrt(
                np.average((label_metric - label_mean)**2,
                           weights=label_weight))
            label_stats['mean'] = float(label_mean)
            label_stats['std'] = float(label_std)

    print(json.dumps(stats, indent=args.indent, sort_keys=args.sort_keys))
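For reference, the per-label statistics computed above are the weighted mean and the weighted population standard deviation. A small standalone check of the formula:

import numpy as np

x = np.array([2.0, 4.0, 4.0])
w = np.array([1.0, 1.0, 2.0])
mean = np.average(x, weights=w)                         # 3.5
std = np.sqrt(np.average((x - mean) ** 2, weights=w))   # ~0.866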