def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.in_tractogram, args.in_transfo])
    assert_outputs_exist(parser, args, args.out_tractogram)

    if args.verbose:
        log_level = logging.INFO
        logging.basicConfig(level=log_level)

    wb_file = load_tractogram_with_reference(parser, args, args.in_tractogram)
    wb_streamlines = wb_file.streamlines
    model_file = load_tractogram_with_reference(parser, args, args.in_model)

    transfo = load_matrix_in_any_format(args.in_transfo)
    if args.inverse:
        transfo = np.linalg.inv(transfo)

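    # Sanity check: the transform should bring the barycenters of the two
    # volumes closer together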
    before, after = compute_distance_barycenters(wb_file, model_file, transfo)
    if after > before:
        logging.warning('The distance between the volume barycenters should '
                        'be lower after registration. Maybe try adding or '
                        'removing --inverse.')
        logging.info('Distance before: {}, Distance after: {}'.format(
            np.round(before, 3), np.round(after, 3)))
    model_streamlines = transform_streamlines(model_file.streamlines, transfo)

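    # RecoBundles first clusters the whole-brain tractogram; a cluster map
    # saved from a previous run can be reused via --in_pickle to skip that step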
    rng = np.random.RandomState(args.seed)
    if args.in_pickle:
        with open(args.in_pickle, 'rb') as infile:
            cluster_map = pickle.load(infile)
        reco_obj = RecoBundles(wb_streamlines,
                               cluster_map=cluster_map,
                               rng=rng,
                               verbose=args.verbose)
    else:
        reco_obj = RecoBundles(wb_streamlines,
                               clust_thr=args.tractogram_clustering_thr,
                               rng=rng,
                               verbose=args.verbose)

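    # Persist the cluster map so it can be reused later through --in_pickle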
    if args.out_pickle:
        with open(args.out_pickle, 'wb') as outfile:
            pickle.dump(reco_obj.cluster_map, outfile)
    _, indices = reco_obj.recognize(ArraySequence(model_streamlines),
                                    args.model_clustering_thr,
                                    pruning_thr=args.pruning_thr,
                                    slr_num_threads=args.slr_threads)
    new_streamlines = wb_streamlines[indices]
    new_data_per_streamlines = wb_file.data_per_streamline[indices]
    new_data_per_points = wb_file.data_per_point[indices]

    if not args.no_empty or new_streamlines:
        sft = StatefulTractogram(new_streamlines,
                                 wb_file.space_attributes,
                                 Space.RASMM,
                                 data_per_streamline=new_data_per_streamlines,
                                 data_per_point=new_data_per_points)
        save_tractogram(sft, args.out_tractogram)

# Example 2

def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_matrices)
    assert_outputs_exist(parser, args, args.out_json)

    all_matrices = []
    for filename in args.in_matrices:
        tmp_mat = load_matrix_in_any_format(filename)
        tmp_mat = tmp_mat.astype(float)
        tmp_mat -= np.min(tmp_mat)
        if args.normalize:
            all_matrices.append(tmp_mat / np.max(tmp_mat))
        else:
            all_matrices.append(tmp_mat)

    if args.single_compare:
        tmp_mat = load_matrix_in_any_format(args.single_compare)
        tmp_mat = tmp_mat.astype(float)
        tmp_mat -= np.min(tmp_mat)
        if args.normalize:
            all_matrices.append(tmp_mat / np.max(tmp_mat))
        else:
            all_matrices.append(tmp_mat)

    output_measures_dict = {
        'SSD': [],
        'correlation': [],
        'w_dice_voxels': [],
        'dice_voxels': []
    }

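    # With --single_compare, every matrix is compared against the single
    # reference; otherwise all unique pairs of matrices are compared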
    if args.single_compare:
        if args.single_compare in args.in_matrices:
            idx = args.in_matrices.index(args.single_compare)
            all_matrices.pop(idx)
        pairs = list(itertools.product(all_matrices[:-1], [all_matrices[-1]]))
    else:
        pairs = list(itertools.combinations(all_matrices, r=2))

    for mat_1, mat_2 in pairs:
        ssd = np.sum((mat_1 - mat_2)**2)
        output_measures_dict['SSD'].append(ssd)
        corrcoef = np.corrcoef(mat_1.ravel(), mat_2.ravel())
        output_measures_dict['correlation'].append(corrcoef[0][1])
        dice, w_dice = compute_dice_voxel(mat_1, mat_2)
        output_measures_dict['dice_voxels'].append(dice)
        output_measures_dict['w_dice_voxels'].append(w_dice)

    with open(args.out_json, 'w') as outfile:
        json.dump(output_measures_dict,
                  outfile,
                  indent=args.indent,
                  sort_keys=args.sort_keys)
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.in_tractogram, args.in_transfo])
    assert_outputs_exist(parser, args, args.out_tractogram)

    wb_file = load_tractogram_with_reference(parser, args, args.in_tractogram)
    wb_streamlines = wb_file.streamlines
    model_file = load_tractogram_with_reference(parser, args, args.in_model)

    # Default transformation source is expected to be ANTs
    transfo = load_matrix_in_any_format(args.in_transfo)
    if args.inverse:
        transfo = np.linalg.inv(transfo)

    model_streamlines = transform_streamlines(model_file.streamlines, transfo)

    rng = np.random.RandomState(args.seed)
    if args.in_pickle:
        with open(args.in_pickle, 'rb') as infile:
            cluster_map = pickle.load(infile)
        reco_obj = RecoBundles(wb_streamlines,
                               cluster_map=cluster_map,
                               rng=rng,
                               verbose=args.verbose)
    else:
        reco_obj = RecoBundles(wb_streamlines,
                               clust_thr=args.tractogram_clustering_thr,
                               rng=rng,
                               verbose=args.verbose)

    if args.out_pickle:
        with open(args.out_pickle, 'wb') as outfile:
            pickle.dump(reco_obj.cluster_map, outfile)
    _, indices = reco_obj.recognize(ArraySequence(model_streamlines),
                                    args.model_clustering_thr,
                                    pruning_thr=args.pruning_thr,
                                    slr_num_threads=args.slr_threads)
    new_streamlines = wb_streamlines[indices]
    new_data_per_streamlines = wb_file.data_per_streamline[indices]
    new_data_per_points = wb_file.data_per_point[indices]

    if not args.no_empty or new_streamlines:
        sft = StatefulTractogram(new_streamlines,
                                 wb_file.space_attributes,
                                 Space.RASMM,
                                 data_per_streamline=new_data_per_streamlines,
                                 data_per_point=new_data_per_points)
        save_tractogram(sft, args.out_tractogram)
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(
        parser,
        [args.in_moving_tractogram, args.in_target_file, args.in_transfo],
        args.in_deformation)
    assert_outputs_exist(parser, args, args.out_tractogram)

    moving_sft = load_tractogram_with_reference(parser,
                                                args,
                                                args.in_moving_tractogram,
                                                bbox_check=False)

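    # Affine part of the registration; an optional deformation field handles
    # the nonlinear part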
    transfo = load_matrix_in_any_format(args.in_transfo)
    deformation_data = None
    if args.in_deformation is not None:
        deformation_data = np.squeeze(
            nib.load(args.in_deformation).get_fdata(dtype=np.float32))

    new_sft = transform_warp_streamlines(moving_sft,
                                         transfo,
                                         args.in_target_file,
                                         inverse=args.inverse,
                                         deformation_data=deformation_data,
                                         remove_invalid=args.remove_invalid,
                                         cut_invalid=args.cut_invalid)

    if args.keep_invalid:
        if not new_sft.is_bbox_in_vox_valid():
            logging.warning('Saving tractogram with invalid streamlines.')
        save_tractogram(new_sft, args.out_tractogram, bbox_valid_check=False)
    else:
        save_tractogram(new_sft, args.out_tractogram)
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(
        parser, [args.in_tractogram, args.in_config_file, args.in_transfo])

    for directory in args.in_models_directories:
        if not os.path.isdir(directory):
            parser.error('Input folder {0} does not exist'.format(directory))

    assert_output_dirs_exist_and_empty(parser, args, args.out_dir)

    logging.basicConfig(
        filename=os.path.join(args.out_dir, 'logfile.txt'),
        filemode='w',
        format='%(asctime)s, %(name)s %(levelname)s %(message)s',
        datefmt='%H:%M:%S',
        level=args.log_level)

    coloredlogs.install(level=args.log_level)

    transfo = load_matrix_in_any_format(args.in_transfo)
    if args.inverse:
        transfo = np.linalg.inv(transfo)

    with open(args.in_config_file) as json_data:
        config = json.load(json_data)

    voting = VotingScheme(
        config,
        args.in_models_directories,
        transfo,
        args.out_dir,
        tractogram_clustering_thr=args.tractogram_clustering_thr,
        minimal_vote_ratio=args.minimal_vote_ratio,
        multi_parameters=args.multi_parameters)

    if args.seeds is None:
        seeds = [random.randint(1, 1000)]
    else:
        seeds = args.seeds

    voting(args.in_tractogram, nbr_processes=args.nbr_processes, seeds=seeds)
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_matrices,
                        [args.labels_list, args.in_ordering])
    assert_output_dirs_exist_and_empty(parser, args, [], args.out_dir)
    if args.out_dir is None:
        args.out_dir = './'
    if args.out_suffix is None:
        args.out_suffix = ""
    out_filenames = []
    for filename in args.in_matrices:
        basename, _ = os.path.splitext(filename)
        basename = os.path.basename(basename)
        out_filenames.append('{}/{}{}.npy'.format(args.out_dir,
                                                  basename,
                                                  args.out_suffix))

    assert_outputs_exist(parser, args, out_filenames)
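
    # The ordering file holds two whitespace-separated lines: the row order
    # first, the column order second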
    with open(args.in_ordering, 'r') as my_file:
        lines = my_file.readlines()
        ordering = [[int(val) for val in lines[0].split()],
                    [int(val) for val in lines[1].split()]]

    for filename in args.in_matrices:
        basename, _ = os.path.splitext(filename)
        basename = os.path.basename(basename)
        matrix = load_matrix_in_any_format(filename)

        if args.labels_list:
            labels_list = np.loadtxt(args.labels_list, dtype=np.int16).tolist()
            indices_1, indices_2 = [], []
            for j in ordering[0]:
                indices_1.append(labels_list.index(j))
            for j in ordering[1]:
                indices_2.append(labels_list.index(j))
        else:
            indices_1 = ordering[0]
            indices_2 = ordering[1]

        if (np.array(indices_1) >= matrix.shape[0]).any() \
                or (np.array(indices_2) >= matrix.shape[1]).any():
            raise ValueError('Indices from config higher than matrix size, '
                             'maybe you need a labels list?')
        tmp_matrix = matrix[tuple(indices_1), :]
        tmp_matrix = tmp_matrix[:, tuple(indices_2)]
        save_matrix_in_any_format('{}/{}{}.npy'.format(args.out_dir,
                                                       basename,
                                                       args.out_suffix),
                                  tmp_matrix)

# Example 7

def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    assert_inputs_exist(parser, [args.in_moving_tractogram,
                                 args.in_target_file,
                                 args.in_transfo], args.in_deformation)
    assert_outputs_exist(parser, args, args.out_tractogram)

    moving_sft = load_tractogram_with_reference(parser, args,
                                                args.in_moving_tractogram,
                                                bbox_check=False)

    transfo = load_matrix_in_any_format(args.in_transfo)
    deformation_data = None
    if args.in_deformation is not None:
        deformation_data = np.squeeze(nib.load(
            args.in_deformation).get_fdata(dtype=np.float32))

    new_sft = transform_warp_sft(moving_sft, transfo,
                                 args.in_target_file,
                                 inverse=args.inverse,
                                 reverse_op=args.reverse_operation,
                                 deformation_data=deformation_data,
                                 remove_invalid=args.remove_invalid,
                                 cut_invalid=args.cut_invalid)

    if len(new_sft.streamlines) == 0:
        if args.no_empty:
            logging.debug("The file {} won't be written "
                          "(0 streamline).".format(args.out_tractogram))
            return

    if args.keep_invalid:
        if not new_sft.is_bbox_in_vox_valid():
            logging.warning('Saving tractogram with invalid streamlines.')
        save_tractogram(new_sft, args.out_tractogram, bbox_valid_check=False)
    else:
        if not new_sft.is_bbox_in_vox_valid():
            logging.warning('Removing invalid streamlines before '
                            'saving tractogram.')
            new_sft.remove_invalid_streamlines()
        save_tractogram(new_sft, args.out_tractogram)
def load_data(arg):
    if is_float(arg):
        data = float(arg)
    else:
        if not os.path.isfile(arg):
            logging.error('Input file %s does not exist', arg)
            raise ValueError('Input file {} does not exist.'.format(arg))

        data = load_matrix_in_any_format(arg)
        logging.info('Loaded %s of shape %s and data_type %s', arg, data.shape,
                     data.dtype)

        if data.ndim > 2:
            logging.warning('%s has %s dimensions, be careful', arg, data.ndim)
        elif data.ndim < 2:
            logging.warning('%s has %s dimensions, not valid', arg, data.ndim)
            raise ValueError('{} has {} dimensions, not valid.'.format(
                arg, data.ndim))

    return data
def load_data(arg):
    if is_float(arg):
        data = float(arg)
    else:
        if not os.path.isfile(arg):
            raise ValueError('Input file {} does not exist.'.format(arg))

        data = load_matrix_in_any_format(arg)
        logging.info('Loaded {} of shape {} and data_type {}.'.format(
            arg, data.shape, data.dtype))

        if data.ndim > 2:
            logging.warning('{} has {} dimensions, be careful.'.format(
                arg, data.ndim))
        elif data.ndim < 2:
            raise ValueError('{} has {} dimensions, not valid.'.format(
                arg, data.ndim))

    return data

# Example 10

def load_matrix(arg):
    if is_float(arg):
        matrix = float(arg)
    else:
        if not os.path.isfile(arg):
            raise ValueError('Input file {} does not exist.'.format(arg))

        data = load_matrix_in_any_format(arg).astype(np.float64)
        matrix = nib.Nifti1Image(data, np.eye(4))
        logging.info('Loaded {} of shape {} and data_type {}.'.format(
            arg, data.shape, data.dtype))

        if data.ndim > 2:
            logging.warning('{} has {} dimensions, be careful.'.format(
                arg, data.ndim))
        elif data.ndim < 2:
            raise ValueError('{} has {} dimensions, not valid.'.format(
                arg, data.ndim))

    return matrix
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_matrix)
    assert_outputs_exist(parser, args, args.out_txt)

    matrix = load_matrix_in_any_format(args.in_matrix)
    labels_list = np.loadtxt(args.labels_list).astype(np.uint16)

    with open(args.out_txt, 'w') as text_file:
        for pos_1, pos_2 in np.argwhere(matrix > 0):
            in_label = labels_list[pos_1]
            out_label = labels_list[pos_2]

            # scil_decompose_connectivity.py only saves the lower-triangular
            # entries
            if out_label < in_label:
                continue
            text_file.write('{}_{}.trk\n'.format(in_label, out_label))
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.in_file, args.in_target_file,
                                 args.in_transfo])
    assert_outputs_exist(parser, args, args.out_name)

    transfo = load_matrix_in_any_format(args.in_transfo)
    if args.inverse:
        transfo = np.linalg.inv(transfo)

    _, ref_extension = split_name_with_nii(args.in_target_file)
    _, in_extension = split_name_with_nii(args.in_file)
    if ref_extension not in ['.nii', '.nii.gz']:
        parser.error('{} is an unsupported format.'.format(args.in_target_file))
    if in_extension not in ['.nii', '.nii.gz']:
        parser.error('{} is an unsupported format.'.format(args.in_file))

    transform_anatomy(transfo, args.in_target_file, args.in_file,
                      args.out_name, keep_dtype=args.keep_dtype)
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_outputs_exist(parser, args, args.out_matrix_mask)

    if not args.lower_than and not args.greater_than:
        parser.error('At least one of the two options is required.')

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    conditions_list = []
    if args.lower_than:
        for input_list in args.lower_than:
            conditions_list.append(('lower', input_list))
    if args.greater_than:
        for input_list in args.greater_than:
            conditions_list.append(('greater', input_list))

    condition_counter = 0
    shape = load_matrix_in_any_format(conditions_list[0][1][0]).shape
    output_mask = np.zeros(shape)
    for input_tuple_list in conditions_list:
        condition = input_tuple_list[0]
        input_list = input_tuple_list[1]

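        # Each condition's argument list is: matrix filenames..., then a value
        # threshold, then a population fraction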
        condition_counter += 1
        matrices = [load_matrix_in_any_format(i) for i in input_list[:-2]]
        matrices = np.rollaxis(np.array(matrices), axis=0, start=3)
        value_threshold = float(input_list[-2])
        population_threshold = int(float(input_list[-1]) * matrices.shape[-1])

        empty_matrices = np.zeros(matrices.shape)
        # The only difference between the two conditions; the rest is identical
        if condition == 'lower':
            empty_matrices[matrices < value_threshold] = 1
        else:
            empty_matrices[matrices >= value_threshold] = 1

        population_score = np.sum(empty_matrices, axis=2)

        logging.debug(
            'Condition {}_than (#{}) resulted in {} filtered '
            'elements out of {}.'.format(
                condition, condition_counter,
                len(np.where(population_score < population_threshold)[0]),
                np.prod(shape)))

        output_mask[population_score >= population_threshold] += 1

    if not args.keep_condition_count:
        output_mask[output_mask < condition_counter] = 0
        output_mask[output_mask > 0] = 1

    if args.inverse_mask:
        if args.keep_condition_count:
            output_mask = np.abs(output_mask - np.max(output_mask))
        else:
            output_mask = invert([output_mask])

    filtered_elem = np.prod(shape) - np.count_nonzero(output_mask)

    # To prevent misuse, --keep_condition_count should not be used for
    # masking without binarization first
    if args.keep_condition_count:
        logging.warning('Keeping the condition count is not recommended for '
                        'filtering.\nApply a threshold manually to binarize '
                        'the output matrix.')
    else:
        logging.debug('All conditions resulted in {} filtered '
                      'elements out of {}.'.format(filtered_elem,
                                                   np.prod(shape)))

    save_matrix_in_any_format(args.out_matrix_mask,
                              output_mask.astype(np.uint8))

# Example 14

def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.INFO)

    assert_outputs_exist(parser, args, args.out_matrix)

    # Binary operations require specific verifications
    binary_op = ['union', 'intersection', 'difference', 'invert']

    if args.operation not in OPERATIONS.keys():
        parser.error('Operation {} not implemented.'.format(args.operation))

    # Find at least one matrix to use as the reference
    found_ref = False
    for input_arg in args.in_matrices:
        if not is_float(input_arg):
            ref_data = load_matrix_in_any_format(input_arg)
            ref_matrix = nib.Nifti1Image(ref_data, np.eye(4))
            mask = np.zeros(ref_data.shape)
            found_ref = True
            break

    if not found_ref:
        raise ValueError('Requires at least one matrix.')

    # Load all input matrices
    input_matrices = []
    for input_arg in args.in_matrices:
        matrix = load_matrix(input_arg)

        if args.operation in binary_op and isinstance(matrix, nib.Nifti1Image):
            data = matrix.get_fdata(dtype=np.float64)
            unique = np.unique(data)
            if len(unique) > 2:
                parser.error('Binary operations can only be performed with '
                             'binary masks.')

            if len(unique) == 2 and not (unique == [0, 1]).all():
                logging.warning('Input data for a binary operation is not a '
                                'binary array and will be converted.\n'
                                'Non-zero values will be set to one.')
                data[data != 0] = 1

        if isinstance(matrix, nib.Nifti1Image):
            data = matrix.get_fdata(dtype=np.float64)
            mask[data > 0] = 1
        input_matrices.append(matrix)

    if args.operation == 'convert' and not args.data_type:
        parser.error('Convert operation must be used with --data_type.')

    # Perform the requested operation
    try:
        output_data = OPERATIONS[args.operation](input_matrices, ref_matrix)
    except ValueError:
        logging.error('{} operation failed.'.format(
            args.operation.capitalize()))
        return

    # Cast if needed
    if args.data_type:
        output_data = output_data.astype(args.data_type)
    else:
        output_data = output_data.astype(np.float64)

    if args.exclude_background:
        output_data[mask == 0] = 0

    # Saving in the right format
    save_matrix_in_any_format(args.out_matrix, output_data)

# Example 15

def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_matrix,
                        [args.length, args.inverse_length, args.bundle_volume])
    assert_outputs_exist(parser, args, args.out_matrix)

    in_matrix = load_matrix_in_any_format(args.in_matrix)

    # Parcel volume and surface normalization require the atlas
    # This script should be used directly after scil_decompose_connectivity.py
    if args.parcel_volume or args.parcel_surface:
        atlas_tuple = args.parcel_volume if args.parcel_volume \
            else args.parcel_surface
        atlas_filepath, labels_filepath = atlas_tuple
        assert_inputs_exist(parser, [atlas_filepath, labels_filepath])

        atlas_img = nib.load(atlas_filepath)
        atlas_data = get_data_as_label(atlas_img)

        voxels_size = atlas_img.header.get_zooms()[:3]
        if voxels_size[0] != voxels_size[1] \
           or voxels_size[0] != voxels_size[2]:
            parser.error('Atlas must have an isotropic resolution.')

        voxels_vol = np.prod(atlas_img.header.get_zooms()[:3])
        voxels_sur = np.prod(atlas_img.header.get_zooms()[:2])

        # Excluding background (0)
        labels_list = np.loadtxt(labels_filepath)
        if len(labels_list) != in_matrix.shape[0] \
                and len(labels_list) != in_matrix.shape[1]:
            parser.error('Atlas should have the same number of labels as the '
                         'input matrix.')

    # Normalization can be combined together
    out_matrix = in_matrix
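    # NOTE: this is an alias, not a copy; the in-place edits below also modify
    # in_matrix, which copy(in_matrix) and ref_matrix further down rely on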
    if args.length:
        length_mat = load_matrix_in_any_format(args.length)
        out_matrix[length_mat > 0] *= length_mat[length_mat > 0]
    elif args.inverse_length:
        length_mat = load_matrix_in_any_format(args.inverse_length)
        out_matrix[length_mat > 0] /= length_mat[length_mat > 0]

    if args.bundle_volume:
        volume_mat = load_matrix_in_any_format(args.bundle_volume)
        out_matrix[volume_mat > 0] /= volume_mat[volume_mat > 0]

    # Node-wise computations are necessary for this type of normalization
    if args.parcel_volume or args.parcel_surface:
        out_matrix = copy(in_matrix)
        pos_list = range(len(labels_list))
        all_comb = list(itertools.combinations(pos_list, r=2))
        all_comb.extend(zip(pos_list, pos_list))

        # Prevent useless computations for approximate_surface_node()
        factor_list = []
        for label in labels_list:
            if args.parcel_volume:
                factor_list.append(
                    np.count_nonzero(atlas_data == label) * voxels_vol)
            else:
                if np.count_nonzero(atlas_data == label):
                    roi = np.zeros(atlas_data.shape)
                    roi[atlas_data == label] = 1
                    factor_list.append(
                        approximate_surface_node(roi) * voxels_sur)
                else:
                    factor_list.append(0)

        for pos_1, pos_2 in all_comb:
            factor = factor_list[pos_1] + factor_list[pos_2]
            if abs(factor) > 0.001:
                out_matrix[pos_1, pos_2] /= factor
                out_matrix[pos_2, pos_1] /= factor

    # Load as image
    ref_matrix = nib.Nifti1Image(in_matrix, np.eye(4))
    # Simple scaling of the whole matrix facilitates comparison across subjects
    if args.max_at_one:
        out_matrix = nib.Nifti1Image(out_matrix, np.eye(4))
        out_matrix = normalize_max([out_matrix], ref_matrix)
    elif args.sum_to_one:
        out_matrix = nib.Nifti1Image(out_matrix, np.eye(4))
        out_matrix = normalize_sum([out_matrix], ref_matrix)
    elif args.log_10:
        out_matrix = nib.Nifti1Image(out_matrix, np.eye(4))
        out_matrix = base_10_log([out_matrix], ref_matrix)

    save_matrix_in_any_format(args.out_matrix, out_matrix)

# Example 16

def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.in_length_matrix, args.in_conn_matrix])

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    if not args.append_json:
        assert_outputs_exist(parser, args, args.out_json)
    else:
        logging.debug('Using --append_json, make sure to delete {} '
                      'before re-launching a group analysis.'.format(
                          args.out_json))

    if args.append_json and args.overwrite:
        parser.error('Cannot use the append option at the same time as '
                     'overwrite.\nAmbiguous behavior, consider deleting the '
                     'output json file first instead.')

    conn_matrix = load_matrix_in_any_format(args.in_conn_matrix)
    len_matrix = load_matrix_in_any_format(args.in_length_matrix)

    if args.filtering_mask:
        mask_matrix = load_matrix_in_any_format(args.filtering_mask)
        conn_matrix *= mask_matrix
        len_matrix *= mask_matrix
    N = len_matrix.shape[0]

    if args.avg_node_wise:
        func_cast = avg_cast
    else:
        func_cast = list_cast

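    # Graph-theory measures from the Brain Connectivity Toolbox (bct)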
    gtm_dict = {}
    betweenness_centrality = bct.betweenness_wei(len_matrix) / ((N - 1) *
                                                                (N - 2))
    gtm_dict['betweenness_centrality'] = func_cast(betweenness_centrality)
    ci, gtm_dict['modularity'] = bct.modularity_louvain_und(conn_matrix,
                                                            seed=0)

    gtm_dict['assortativity'] = bct.assortativity_wei(conn_matrix, flag=0)
    gtm_dict['participation'] = func_cast(
        bct.participation_coef_sign(conn_matrix, ci)[0])
    gtm_dict['clustering'] = func_cast(bct.clustering_coef_wu(conn_matrix))

    gtm_dict['nodal_strength'] = func_cast(bct.strengths_und(conn_matrix))
    gtm_dict['local_efficiency'] = func_cast(
        bct.efficiency_wei(len_matrix, local=True))
    gtm_dict['global_efficiency'] = func_cast(bct.efficiency_wei(len_matrix))
    gtm_dict['density'] = func_cast(bct.density_und(conn_matrix)[0])

    # Rich club always warns about the matrix rank and produces NaNs
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        tmp_rich_club = bct.rich_club_wu(conn_matrix)
    gtm_dict['rich_club'] = func_cast(tmp_rich_club[~np.isnan(tmp_rich_club)])

    # Path length gives an infinite distance for unconnected nodes,
    # so remove the empty rows/columns before computing it
    empty_connections = np.where(np.sum(len_matrix, axis=1) < 0.001)[0]
    if len(empty_connections):
        len_matrix = np.delete(len_matrix, empty_connections, axis=0)
        len_matrix = np.delete(len_matrix, empty_connections, axis=1)

    path_length_tuple = bct.distance_wei(len_matrix)
    gtm_dict['path_length'] = func_cast(path_length_tuple[0])
    gtm_dict['edge_count'] = func_cast(path_length_tuple[1])

    if not args.avg_node_wise:
        for i in empty_connections:
            gtm_dict['path_length'].insert(i, -1)
            gtm_dict['edge_count'].insert(i, -1)

    if args.small_world:
        gtm_dict['omega'], gtm_dict['sigma'] = omega_sigma(len_matrix)

    if os.path.isfile(args.out_json) and args.append_json:
        with open(args.out_json) as json_data:
            out_dict = json.load(json_data)
        for key in gtm_dict.keys():
            if isinstance(out_dict[key], list):
                out_dict[key].append(gtm_dict[key])
            else:
                out_dict[key] = [out_dict[key], gtm_dict[key]]
    else:
        out_dict = {}
        for key in gtm_dict.keys():
            out_dict[key] = [gtm_dict[key]]

    with open(args.out_json, 'w') as outfile:
        json.dump(out_dict,
                  outfile,
                  indent=args.indent,
                  sort_keys=args.sort_keys)

# Example 17

def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_matrix)
    if not args.show_only:
        assert_outputs_exist(parser, args, args.out_png, args.histogram)

    if args.lookup_table and not args.labels_list:
        parser.error('Lookup table axis naming requires --labels_list.')

    matrix = load_matrix_in_any_format(args.in_matrix)

    if args.log:
        matrix[matrix > EPSILON] = np.log10(matrix[matrix > EPSILON])
        min_value = np.min(matrix)
        matrix[np.abs(matrix) < EPSILON] = -65536
    else:
        min_value = np.min(matrix)

    fig, ax = plt.subplots()
    im = ax.imshow(matrix,
                   interpolation='nearest',
                   cmap=args.colormap,
                   vmin=min_value)

    if args.write_values:
        if np.prod(matrix.shape) > 1000:
            logging.warning('Large matrix, please consider not using '
                            '--write_values.')
        ax = write_values(ax, matrix, args.write_values)

    if args.display_legend:
        fig.colorbar(im, ax=ax)

    if args.name_axis:
        x_ticks = np.arange(matrix.shape[0])
        y_ticks = np.arange(matrix.shape[1])

        if args.labels_list:
            labels_list = np.loadtxt(args.labels_list, dtype=np.int16).tolist()

        if args.labels_list and not args.reorder_txt and not args.lookup_table:
            if len(labels_list) != matrix.shape[0] \
                    or len(labels_list) != matrix.shape[1]:
                logging.warning('The provided matrix is not the same size '
                                'as the labels list.')
            x_legend = labels_list[0:matrix.shape[0]]
            y_legend = labels_list[0:matrix.shape[1]]
        else:
            x_legend = x_ticks
            y_legend = y_ticks

        if args.reorder_txt:
            with open(args.reorder_txt, 'r') as my_file:
                lines = my_file.readlines()
                x_legend = [int(val) for val in lines[0].split()]
                y_legend = [int(val) for val in lines[1].split()]

        if args.lookup_table:
            if args.reorder_txt:
                logging.warning(
                    'Using a lookup table; make sure the reordering '
                    'file contains labels, not coordinates.')
            with open(args.lookup_table) as json_data:
                lut = json.load(json_data)

            x_legend = []
            y_legend = []
            if args.reorder_txt:
                with open(args.reorder_txt, 'r') as my_file:
                    lines = my_file.readlines()
                    x_list = [int(val) for val in lines[0].split()]
                    y_list = [int(val) for val in lines[1].split()]
            else:
                x_list = labels_list[0:matrix.shape[0]]
                y_list = labels_list[0:matrix.shape[1]]

            x_legend = [
                lut[str(x)] if str(x) in lut else str(x) for x in x_list
            ]
            y_legend = [
                lut[str(x)] if str(x) in lut else str(x) for x in y_list
            ]

        if len(x_ticks) != len(x_legend) \
                or len(y_ticks) != len(y_legend):
            logging.warning(
                'Legend is not the same size as the data. '
                'Make sure you are using the same reordering file.')
        plt.xticks(x_ticks,
                   x_legend,
                   rotation=args.axis_text_angle[0],
                   fontsize=args.axis_text_size[0])
        plt.yticks(y_ticks,
                   y_legend,
                   rotation=args.axis_text_angle[1],
                   fontsize=args.axis_text_size[1])

    if args.show_only:
        plt.show()
    else:
        plt.savefig(args.out_png, dpi=300, bbox_inches='tight')

    if args.histogram:
        fig, ax = plt.subplots()
        if args.exclude_zeros:
            matrix = matrix[matrix != 0]
        _, _, patches = ax.hist(matrix.ravel(), bins=args.nb_bins)
        nbr_bins = len(patches)
        color = plt.cm.get_cmap(args.colormap)(np.linspace(0, 1, nbr_bins))
        for i in range(0, nbr_bins):
            patches[i].set_facecolor(color[i])

        if args.show_only:
            plt.show()
        else:
            plt.savefig(args.histogram, dpi=300, bbox_inches='tight')

# Example 18

def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_matrix)
    if not args.show_only:
        assert_outputs_exist(parser, args, args.out_png, args.histogram)

    if args.lookup_table and not args.labels_list:
        parser.error('Lookup table axis naming requires --labels_list.')

    matrix = load_matrix_in_any_format(args.in_matrix)
    matrix[np.isnan(matrix)] = 0
    if args.log:
        matrix[matrix > EPSILON] = np.log10(matrix[matrix > EPSILON])
        min_value = np.min(matrix)
        matrix[np.abs(matrix) < EPSILON] = -65536
    else:
        min_value = np.min(matrix)

    max_value = None
    if args.legend_min_max is not None:
        min_value = args.legend_min_max[0]
        max_value = args.legend_min_max[1]

    fig, ax = plt.subplots()
    im = ax.imshow(matrix.T,
                   interpolation='nearest',
                   cmap=args.colormap,
                   vmin=min_value,
                   vmax=max_value)

    if args.write_values:
        if np.prod(matrix.shape) > 1000:
            logging.warning('Large matrix, please consider not using '
                            '--write_values.')
        ax = write_values(ax, matrix, args.write_values)

    if args.display_legend:
        fig.colorbar(im, ax=ax)

    if args.name_axis:
        y_ticks = np.arange(matrix.shape[0])
        x_ticks = np.arange(matrix.shape[1])

        if args.labels_list:
            labels_list = np.loadtxt(args.labels_list, dtype=np.int16).tolist()

        if args.labels_list and not args.reorder_txt and not args.lookup_table:
            if len(labels_list) != matrix.shape[0] \
                    or len(labels_list) != matrix.shape[1]:
                logging.warning('The provided matrix is not the same size '
                                'as the labels list.')
            y_legend = labels_list[0:matrix.shape[0]]
            x_legend = labels_list[0:matrix.shape[1]]
        else:
            y_legend = y_ticks
            x_legend = x_ticks

        if args.reorder_txt:
            with open(args.reorder_txt, 'r') as my_file:
                lines = my_file.readlines()
                y_legend = [int(val) for val in lines[0].split()]
                x_legend = [int(val) for val in lines[1].split()]

        if args.lookup_table:
            if args.reorder_txt:
                logging.warning('Using a lookup table; make sure the '
                                'reordering file contains labels, not '
                                'coordinates.')
            with open(args.lookup_table) as json_data:
                lut = json.load(json_data)

            y_legend = []
            x_legend = []
            if args.reorder_txt:
                with open(args.reorder_txt, 'r') as my_file:
                    lines = my_file.readlines()
                    y_list = [int(val) for val in lines[0].split()]
                    x_list = [int(val) for val in lines[1].split()]
            else:
                y_list = labels_list[0:matrix.shape[0]]
                x_list = labels_list[0:matrix.shape[1]]

            y_legend = [
                lut[str(x)] if str(x) in lut else str(x) for x in y_list
            ]
            x_legend = [
                lut[str(x)] if str(x) in lut else str(x) for x in x_list
            ]

        if len(y_ticks) != len(y_legend) \
                or len(x_ticks) != len(x_legend):
            logging.warning('Legend is not the same size as the data. '
                            'Make sure you are using the same reordering '
                            'file.')
        plt.xticks(x_ticks,
                   x_legend,
                   rotation=args.axis_text_angle[0],
                   fontsize=args.axis_text_size[0])
        plt.yticks(y_ticks,
                   y_legend,
                   rotation=args.axis_text_angle[1],
                   fontsize=args.axis_text_size[1])

    if args.show_only:
        plt.show()
    else:
        plt.savefig(args.out_png, dpi=300, bbox_inches='tight')

    if args.histogram:
        fig, ax = plt.subplots()
        if args.exclude_zeros:
            matrix_hist = matrix[matrix != 0]
        else:
            matrix_hist = matrix.ravel()

        _, _, patches = ax.hist(matrix_hist, bins=args.nb_bins)
        nbr_bins = len(patches)
        color = plt.cm.get_cmap(args.colormap)(np.linspace(0, 1, nbr_bins))
        for i in range(0, nbr_bins):
            patches[i].set_facecolor(color[i])

        if args.show_only:
            plt.show()
        else:
            plt.savefig(args.histogram, dpi=300, bbox_inches='tight')

    if args.chord_chart:
        if not args.name_axis:
            if matrix.shape[0] != matrix.shape[1]:
                logging.warning('The matrix is not square; the parcel order '
                                'on both axes must be the same.')
            x_legend = [str(i) for i in range(matrix.shape[0])]
            y_legend = [str(i) for i in range(matrix.shape[1])]
        if isinstance(x_legend, np.ndarray):
            x_legend = x_legend.tolist()
            y_legend = y_legend.tolist()

        total_legend = copy.copy(x_legend)
        total_legend.extend(y_legend)
        total_legend = set(total_legend)
        if args.lookup_table:
            total_legend = sorted(total_legend)
        else:
            total_legend = sorted(total_legend, key=int)

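        # Build a symmetric matrix over the union of row and column labels for
        # the chord diagram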
        new_matrix = np.zeros((len(total_legend), len(total_legend)))
        for x in range(len(total_legend)):
            for y in range(len(total_legend)):
                if total_legend[x] in x_legend and total_legend[y] in y_legend:
                    i = x_legend.index(total_legend[x])
                    j = y_legend.index(total_legend[y])
                    new_matrix[x, y] = matrix[i, j]
                    new_matrix[y, x] = matrix[i, j]

        fig = plt.figure(figsize=(6, 6))
        ax = plt.axes([0, 0, 1, 1])
        new_matrix[new_matrix < np.percentile(new_matrix,
                                              args.percentile_threshold)] = 0

        empty_to_del = (np.where(~new_matrix.any(axis=1))[0])
        non_empty_to_keep = np.setdiff1d(range(len(total_legend)),
                                         empty_to_del)
        total_legend = [total_legend[i] for i in non_empty_to_keep]
        new_matrix = np.delete(new_matrix, empty_to_del, axis=0)
        new_matrix = np.delete(new_matrix, empty_to_del, axis=1)

        cmap = matplotlib.cm.get_cmap(args.colormap)
        colors = [cmap(i)[0:3] for i in np.linspace(0, 1, len(new_matrix))]
        nodePos = chordDiagram(new_matrix,
                               ax,
                               colors=colors,
                               angle_threshold=args.angle_threshold,
                               alpha=args.alpha,
                               text_dist=args.text_distance)
        ax.axis('off')
        prop = dict(fontsize=args.text_size, ha='center', va='center')
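        # Heuristic to flip the label offset direction as the node angle wraps
        # around the circle, keeping labels outside the diagram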
        previous_val = 0
        first_flip = False
        flip = 1
        for i in range(len(new_matrix)):
            radians = math.radians(nodePos[i][2])
            if nodePos[i][2] > previous_val:
                previous_val = nodePos[i][2]
            else:
                flip = -1
                previous_val = 0
                first_flip = True

            if nodePos[i][2] > 270:
                flip = 1 if first_flip else -1
            text_len = len(str(total_legend[i]))

            textPos = polar2xy(text_len * args.text_size * 0.001 * flip,
                               radians)

            ax.text(nodePos[i][0] + textPos[0],
                    nodePos[i][1] + textPos[1],
                    total_legend[i],
                    rotation=nodePos[i][2],
                    **prop)

        if args.show_only:
            plt.show()
        else:
            plt.savefig(args.chord_chart, dpi=600, bbox_inches='tight')
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_matrix,
                        [args.labels_list, args.in_json])

    if args.labels_list and args.bct_reorder_nodes:
        parser.error('Cannot use the bct_reorder_nodes option with a '
                     'labels_list.')

    if args.keys and args.bct_reorder_nodes:
        parser.error('Cannot use the bct_reorder_nodes option with keys.')

    matrix = load_matrix_in_any_format(args.in_matrix)

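    # Two modes: extract sub-matrices according to a JSON config, or reorder
    # the nodes with bct.reorder_matrix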
    if args.in_json:
        with open(args.in_json) as json_data:
            config = json.load(json_data)
        if args.keys:
            keys = args.keys
        else:
            keys = config.keys()
        out_filenames = []
        for key in keys:
            out_filenames.append('{}_{}.npy'.format(args.out_prefix, key))
        assert_outputs_exist(parser, args, out_filenames)

        for i, key in enumerate(keys):
            if args.labels_list:
                labels_list = np.loadtxt(
                    args.labels_list, dtype=np.int16).tolist()
                indices_1, indices_2 = [], []
                for j in config[key][0]:
                    indices_1.append(labels_list.index(j))
                for j in config[key][1]:
                    indices_2.append(labels_list.index(j))
            else:
                if key not in config:
                    raise ValueError('{} not in config, maybe you need a labels '
                                     'list?'.format(key))
                indices_1 = config[key][0]
                indices_2 = config[key][1]

            if (np.array(indices_1) >= matrix.shape[0]).any() \
                    or (np.array(indices_2) >= matrix.shape[1]).any():
                raise ValueError('Indices from config higher than matrix size, '
                                 'maybe you need a labels list?')
            tmp_matrix = matrix[tuple(indices_1), :]
            tmp_matrix = tmp_matrix[:, tuple(indices_2)]
            save_matrix_in_any_format(out_filenames[i], tmp_matrix)
    else:
        assert_outputs_exist(parser, args, [], args.bct_reorder_nodes)

        out_matrix, out_indices, _ = bct.reorder_matrix(matrix)
        out_json = args.bct_reorder_nodes

        out_indices = out_indices.tolist()
        save_matrix_in_any_format('{}_{}.npy'.format(
            args.out_prefix, 'bct_reorder_nodes'), out_matrix)
        out_indices_dict = {'bct_reorder_nodes': [out_indices,
                                                  out_indices]}

        with open(out_json, 'w') as outfile:
            json.dump(out_indices_dict, outfile, indent=2)

# Example 20

def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.in_hdf5, args.in_target_file,
                                 args.in_transfo], args.in_deformation)
    assert_outputs_exist(parser, args, args.out_hdf5)

    # Remove any existing output first; it is recreated below as a copy of
    # the input
    if os.path.isfile(args.out_hdf5):
        os.remove(args.out_hdf5)

    with h5py.File(args.in_hdf5, 'r') as in_hdf5_file:
        shutil.copy(args.in_hdf5, args.out_hdf5)
        with h5py.File(args.out_hdf5, 'a') as out_hdf5_file:
            transfo = load_matrix_in_any_format(args.in_transfo)

            deformation_data = None
            if args.in_deformation is not None:
                deformation_data = np.squeeze(nib.load(
                    args.in_deformation).get_fdata(dtype=np.float32))
            target_img = nib.load(args.in_target_file)

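            # Each group in the HDF5 is one connection; transform its
            # streamlines and overwrite its datasets in place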
            for key in in_hdf5_file.keys():
                affine = in_hdf5_file.attrs['affine']
                dimensions = in_hdf5_file.attrs['dimensions']
                voxel_sizes = in_hdf5_file.attrs['voxel_sizes']
                streamlines = reconstruct_streamlines_from_hdf5(
                    in_hdf5_file, key)

                if len(streamlines) == 0:
                    continue

                header = create_nifti_header(affine, dimensions, voxel_sizes)
                moving_sft = StatefulTractogram(streamlines, header, Space.VOX,
                                                origin=Origin.TRACKVIS)

                new_sft = transform_warp_streamlines(
                    moving_sft, transfo, target_img,
                    inverse=args.inverse,
                    deformation_data=deformation_data,
                    remove_invalid=not args.cut_invalid,
                    cut_invalid=args.cut_invalid)
                new_sft.to_vox()
                new_sft.to_corner()

                affine, dimensions, voxel_sizes, voxel_order = get_reference_info(
                    target_img)
                out_hdf5_file.attrs['affine'] = affine
                out_hdf5_file.attrs['dimensions'] = dimensions
                out_hdf5_file.attrs['voxel_sizes'] = voxel_sizes
                out_hdf5_file.attrs['voxel_order'] = voxel_order

                group = out_hdf5_file[key]
                del group['data']
                group.create_dataset('data',
                                     data=new_sft.streamlines.get_data())
                del group['offsets']
                group.create_dataset('offsets',
                                     data=new_sft.streamlines._offsets)
                del group['lengths']
                group.create_dataset('lengths',
                                     data=new_sft.streamlines._lengths)
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_g1 + args.in_g2, args.filtering_mask)
    assert_outputs_exist(parser, args, args.out_pval_matrix)

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    if args.filtering_mask:
        filtering_mask = load_matrix_in_any_format(args.filtering_mask)
    else:
        filtering_mask = 1

    matrices_g1 = [
        load_matrix_in_any_format(i) * filtering_mask for i in args.in_g1
    ]
    matrices_g2 = [
        load_matrix_in_any_format(i) * filtering_mask for i in args.in_g2
    ]
    matrices_g1 = np.rollaxis(np.array(matrices_g1), axis=0, start=3)
    matrices_g2 = np.rollaxis(np.array(matrices_g2), axis=0, start=3)

    if matrices_g1.shape[0:2] != matrices_g2.shape[0:2]:
        parser.error('Both groups have different matrix dimensions (NxN).')
    if args.paired and matrices_g1.shape[2] != matrices_g2.shape[2]:
        parser.error('For paired statistic both groups must have the same '
                     'number of observations.')

    matrix_shape = matrices_g1.shape[0:2]
    nb_group_g1 = matrices_g1.shape[2]
    nb_group_g2 = matrices_g2.shape[2]

    # TODO: do this reshape in a better, simpler way
    sum_both_groups = np.sum(matrices_g1, axis=2) + np.sum(matrices_g2, axis=2)
    nbr_non_zeros = np.count_nonzero(np.triu(sum_both_groups))

    logging.debug(
        'The provided matrices contain {} non-zero elements.'.format(
            nbr_non_zeros))

    matrices_g1 = matrices_g1.reshape((np.prod(matrix_shape), nb_group_g1))
    matrices_g2 = matrices_g2.reshape((np.prod(matrix_shape), nb_group_g2))
    # Negative epsilon, to differentiate from null p-values
    matrix_pval = np.ones(np.prod(matrix_shape)) * -0.000001

    text = ' paired' if args.paired else ''
    logging.debug('Performing{} t-test with "{}" hypothesis.'.format(
        text, args.tail))
    logging.debug(
        'Data has dimensions {}x{} with {} and {} observations.'.format(
            matrix_shape[0], matrix_shape[1], nb_group_g1, nb_group_g2))

    # Degrees of freedom for converting t-statistics to p-values
    if args.paired:
        dof = nb_group_g1 - 1
    else:
        dof = nb_group_g1 + nb_group_g2 - 2

    for ind in range(np.prod(matrix_shape)):
        # Skip edges with no data, leaving the negative epsilon instead
        if not matrices_g1[ind].any() and not matrices_g2[ind].any():
            continue

        if args.paired:
            t_stat = ttest_paired_stat_only(matrices_g1[ind], matrices_g2[ind],
                                            args.tail)
        else:
            t_stat = ttest_stat_only(matrices_g1[ind], matrices_g2[ind],
                                     args.tail)

        pval = t.sf(t_stat, dof)
        matrix_pval[ind] = pval if args.tail == 'both' else pval / 2.0

    corr_matrix_pval = matrix_pval.reshape(matrix_shape)
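    # Multiple-comparison correction is applied to the upper triangle only,
    # then the matrix is symmetrized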
    if args.fdr:
        logging.debug('Using FDR, the results will be q-values.')
        corr_matrix_pval = np.triu(corr_matrix_pval)
        corr_matrix_pval[corr_matrix_pval > 0] = multipletests(
            corr_matrix_pval[corr_matrix_pval > 0], 0, method='fdr_bh')[1]

        # Symmetrize the matrix
        matrix_pval = corr_matrix_pval + corr_matrix_pval.T - \
            np.diag(corr_matrix_pval.diagonal())
    elif args.bonferroni:
        corr_matrix_pval = np.triu(corr_matrix_pval)
        corr_matrix_pval[corr_matrix_pval > 0] = multipletests(
            corr_matrix_pval[corr_matrix_pval > 0], 0, method='bonferroni')[1]

        # Symmetrize the matrix
        matrix_pval = corr_matrix_pval + corr_matrix_pval.T - \
            np.diag(corr_matrix_pval.diagonal())
    else:
        matrix_pval = matrix_pval.reshape(matrix_shape)

    save_matrix_in_any_format(args.out_pval_matrix, matrix_pval)

    # Save the significant edges (equivalent to an upper_threshold)
    # 0 where it is not significant and 1 where it is significant
    if args.p_threshold:
        p_thresh = float(args.p_threshold[0])
        masked_pval_matrix = np.zeros(matrix_shape)
        logging.debug('Thresholding the p-values at {}'.format(p_thresh))
        masked_pval_matrix[matrix_pval < p_thresh] = 1
        masked_pval_matrix[matrix_pval < 0] = 0

        save_matrix_in_any_format(args.p_threshold[1], masked_pval_matrix)