Example #1
    def _extract_warped_image_landmarks(self, record):
        """ get registration results - warped registered images and landmarks

        :param record: {str: value}, dictionary with registration params
        :return (str, str, str, str): paths to the warped images and landmarks
        """
        logging.debug('.. warp the registered image and get landmarks')
        path_dir = self._get_path_reg_dir(record)
        path_im_ref, path_im_move, _, path_lnds_move = self._get_paths(record)
        path_log = os.path.join(path_dir, NAME_LOG_REGISTRATION)

        # warp moving landmarks to reference frame
        path_regist = os.path.join(path_dir, os.path.basename(path_im_move))
        dict_params = {
            'path_fiji': self.params['path_fiji'],
            'path_bsh': PATH_SCRIPT_WARP_LANDMARKS,
            'source': path_im_move, 'target': path_im_ref,
            'output': path_dir, 'warp': path_regist}
        # export source points to TXT
        pts_source = load_landmarks(path_lnds_move)
        save_landmarks(os.path.join(path_dir, NAME_LANDMARKS), pts_source)
        # execute transformation
        exec_commands(COMMAND_WARP_LANDMARKS % dict_params, path_logger=path_log)
        # load warped landmarks from TXT
        path_lnds = os.path.join(path_dir, NAME_LANDMARKS_WARPED)
        if os.path.isfile(path_lnds):
            points_warp = load_landmarks(path_lnds)
            path_lnds = os.path.join(path_dir, os.path.basename(path_lnds_move))
            save_landmarks(path_lnds, points_warp)
        else:
            path_lnds = None
        return None, path_regist, None, path_lnds
Example #2
def filter_paired_landmarks(item, path_dataset, path_reference, col_source,
                            col_target):
    """ filter all relevant landmarks which were used and copy them to experiment

    The case is that in a certain challenge stage users provided just a subset
     of all image landmarks, which could also be shuffled. The idea is to identify
     all landmarks the user actually used (provided in the dataset) and filter
     them from the temporary reference dataset.

    :param dict|Series item: one row of the experiment DataFrame
    :param str path_dataset: path to provided landmarks
    :param str path_reference: path to the complete landmark collection
    :param str col_source: column name of landmarks to be transformed
    :param str col_target: column name of landmarks to be compared
    :return tuple(float,ndarray,ndarray): match ratio, filtered ref and move landmarks

    >>> p_data = update_path('data-images')
    >>> p_csv = os.path.join(p_data, 'pairs-imgs-lnds_histol.csv')
    >>> df = pd.read_csv(p_csv)
    >>> ratio, lnds_ref, lnds_move = filter_paired_landmarks(dict(df.iloc[0]), p_data, p_data,
    ...     ImRegBenchmark.COL_POINTS_MOVE, ImRegBenchmark.COL_POINTS_REF)
    >>> ratio
    1.0
    >>> lnds_ref.shape == lnds_move.shape
    True
    """
    path_ref = update_path(item[col_source], pre_path=path_reference)
    if not os.path.isfile(path_ref):
        raise FileNotFoundError('missing landmarks: %s' % path_ref)
    path_load = update_path(item[col_source], pre_path=path_dataset)
    if not os.path.isfile(path_load):
        raise FileNotFoundError('missing landmarks: %s' % path_load)
    pairs = common_landmarks(load_landmarks(path_ref),
                             load_landmarks(path_load),
                             threshold=1)
    if not pairs.size:
        logging.warning(
            'no pairing found between the dataset landmarks and the user reference'
        )
        return 0., np.empty([0]), np.empty([0])

    pairs = sorted(pairs.tolist(), key=lambda p: p[1])
    ind_ref = np.asarray(pairs)[:, 0]
    nb_common = min([
        len(load_landmarks(update_path(item[col], pre_path=path_reference)))
        for col in (col_target, col_source)
    ])
    ind_ref = ind_ref[ind_ref < nb_common]

    path_lnd_ref = update_path(item[col_target], pre_path=path_reference)
    lnds_filter_ref = load_landmarks(path_lnd_ref)[ind_ref]
    path_lnd_move = update_path(item[col_source], pre_path=path_reference)
    lnds_filter_move = load_landmarks(path_lnd_move)[ind_ref]

    ratio_matches = len(ind_ref) / float(nb_common)
    if ratio_matches > 1:
        raise ValueError(
            'suspicious ratio for %i paired and %i common landmarks' %
            (len(pairs), nb_common))
    return ratio_matches, lnds_filter_ref, lnds_filter_move
Example #3
def filter_landmarks(idx_row, path_output, path_dataset, path_reference):
    """ filter all relevant landmarks which were used and copy them to experiment

    :param (idx, {}|Series) idx_row: index and row of the experiment DataFrame
    :param str path_output: path to output folder
    :param str path_dataset: path to provided landmarks
    :param str path_reference: path to the complete landmark collection
    :return (idx, float): record index and match ratio
    """
    idx, row = idx_row
    path_ref = update_path_(row[COL_POINTS_MOVE], path_reference)
    path_load = update_path_(row[COL_POINTS_MOVE], path_dataset)
    pairs = common_landmarks(load_landmarks(path_ref),
                             load_landmarks(path_load),
                             threshold=1)
    pairs = sorted(pairs.tolist(), key=lambda p: p[1])
    ind_ref = np.asarray(pairs)[:, 0]

    # moving and reference landmarks
    for col in [COL_POINTS_REF, COL_POINTS_MOVE]:
        path_in = update_path_(row[col], path_reference)
        path_out = update_path_(row[col], path_output)
        create_folder(os.path.dirname(path_out), ok_existing=True)
        save_landmarks(path_out, load_landmarks(path_in)[ind_ref])

    # save ratio of found landmarks
    len_lnds_ref = len(
        load_landmarks(update_path_(row[COL_POINTS_REF], path_reference)))
    ratio_matches = len(pairs) / float(len_lnds_ref)
    return idx, ratio_matches
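
A minimal driver sketch for the function above, assuming the experiment table is a pandas DataFrame; the CSV name and folder paths are illustrative only, not the benchmark's own wiring.

# Hypothetical usage: feed (index, row) pairs into filter_landmarks
# and collect the per-record match ratios.
import pandas as pd

df_experiments = pd.read_csv('pairs-imgs-lnds.csv')   # assumed experiment table
ratios = dict(
    filter_landmarks((idx, row),
                     path_output='output',
                     path_dataset='dataset',
                     path_reference='reference')
    for idx, row in df_experiments.iterrows())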
Example #4
    def _extract_warped_image_landmarks(self, item):
        """ get registration results - warped registered images and landmarks

        :param dict item: dictionary with registration params
        :return dict: paths to warped images/landmarks
        """
        logging.debug('.. warp the registered image and get landmarks')
        path_dir = self._get_path_reg_dir(item)
        path_im_ref, path_im_move, _, path_lnds_move = self._get_paths(
            item, prefer_pproc=False)
        path_log = os.path.join(path_dir, self.NAME_LOG_REGISTRATION)

        # warp moving landmarks to reference frame
        path_dir_out = os.path.join(path_dir, self.DIR_OUTPUTS)
        # name_ref, _ = os.path.splitext(os.path.basename(path_im_ref))
        name_move, _ = os.path.splitext(os.path.basename(path_im_move))
        path_img_warp = os.path.join(path_dir, os.path.basename(path_im_move))
        dict_params = {
            'exec_Fiji': self.params['exec_Fiji'],
            'path_bsh': self.PATH_SCRIPT_WARP_LANDMARKS,
            'source': path_im_move,
            'target': path_im_ref,
            'output': path_dir,
            # 'transf': os.path.join(path_dir_out, name_ref + '.xml'),
            'transf': os.path.join(path_dir_out, name_move + '.xml'),
            'warp': path_img_warp,
        }

        # export source points to TXT
        pts_source = load_landmarks(path_lnds_move)
        save_landmarks(os.path.join(path_dir, BmUnwarpJ.NAME_LANDMARKS),
                       pts_source)
        # execute transformation
        exec_commands(self.COMMAND_WARP_LANDMARKS % dict_params,
                      path_logger=path_log,
                      timeout=self.EXECUTE_TIMEOUT)
        # load warped landmarks from TXT
        path_lnds_warp = os.path.join(path_dir,
                                      BmUnwarpJ.NAME_LANDMARKS_WARPED)
        if os.path.isfile(path_lnds_warp):
            points_warp = load_landmarks(path_lnds_warp)
            path_lnds_warp = os.path.join(path_dir,
                                          os.path.basename(path_lnds_move))
            save_landmarks(path_lnds_warp, points_warp)
        else:
            path_lnds_warp = None

        # return results
        return {
            self.COL_IMAGE_MOVE_WARP: path_img_warp,
            self.COL_POINTS_MOVE_WARP: path_lnds_warp
        }
Example #5
    def _extract_warped_image_landmarks(self, item):
        """ get registration results - warped registered images and landmarks

        :param dict item: dictionary with registration params
        :return dict: paths to warped images/landmarks
        """
        logging.debug('.. warp the registered image and get landmarks')
        path_dir = self._get_path_reg_dir(item)
        _, path_img_move, _, path_lnds_move = self._get_paths(item)
        path_lnds_warp, path_img_warp = None, None

        # load warped landmarks from TXT
        path_lnds = os.path.join(path_dir, self.NAME_FILE_LANDMARKS)
        if os.path.isfile(path_lnds):
            points_warp = load_landmarks(path_lnds)
            path_lnds_warp = os.path.join(path_dir, os.path.basename(path_lnds_move))
            save_landmarks(path_lnds_warp, points_warp)
            os.remove(path_lnds)

        path_regist = os.path.join(path_dir, self.NAME_FILE_IMAGE)
        if os.path.isfile(path_regist):
            name_img_move, _ = os.path.splitext(os.path.basename(path_img_move))
            _, ext_img_warp = os.path.splitext(self.NAME_FILE_IMAGE)
            path_img_warp = os.path.join(path_dir, name_img_move + ext_img_warp)
            os.rename(path_regist, path_img_warp)

        return {self.COL_IMAGE_MOVE_WARP: path_img_warp,
                self.COL_POINTS_MOVE_WARP: path_lnds_warp}
Example #6
def _visual_image_ref_warp_lnds_move_warp(record, path_dataset=None,
                                          path_experiment=None):
    """ visualise the case with warped reference landmarks to the move frame

    :param {} record: row with the experiment
    :param str|None path_dataset: path to the dataset folder
    :param str|None path_experiment: path to the experiment folder
    :return obj|None:
    """
    assert COL_POINTS_REF_WARP in record and isinstance(record[COL_POINTS_REF_WARP], str), \
        'Missing registered points in "%s"' % COL_POINTS_REF_WARP
    path_points_warp = update_path_(record[COL_POINTS_REF_WARP], path_experiment)
    if not os.path.isfile(path_points_warp):
        logging.warning('missing warped landmarks for: %r', dict(record))
        return

    points_ref, points_move, path_img_ref = _load_landmarks(record, path_dataset)

    points_warp = load_landmarks(path_points_warp)
    if not list(points_warp):
        return
    # draw image with landmarks
    image_move = load_image(update_path_(record[COL_IMAGE_MOVE], path_dataset))
    # image_warp = tl_io.load_image(row['Moving image, Transf.'])
    image = draw_image_points(image_move, points_warp)
    save_image(os.path.join(update_path_(record[COL_REG_DIR], path_experiment),
                            NAME_IMAGE_REF_POINTS_WARP), image)
    del image

    # visualise the landmarks move during registration
    image_ref = load_image(path_img_ref)
    fig = draw_images_warped_landmarks(image_ref, image_move, points_ref,
                                       points_move, points_warp)
    del image_ref, image_move
    return fig
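
A hedged driver sketch for the visualisation helper above; the CSV name, folder paths, and the use of the row index in the output name are assumptions.

# Hypothetical usage: render and save the overlay figure for each record.
import pandas as pd

df_results = pd.read_csv('registration-results.csv')   # assumed results table
for idx, record in df_results.iterrows():
    fig = _visual_image_ref_warp_lnds_move_warp(record,
                                                path_dataset='data',
                                                path_experiment='results')
    if fig is not None:
        fig.savefig('visual_case_%s.pdf' % idx)   # matplotlib figure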
Example #7
    def _visual_image_move_warp_lnds_move_warp(cls, item, path_dataset=None, path_experiment=None):
        """ visualise the case with warped moving image and landmarks
        to the reference frame so they are simple to overlap

        :param dict item: row with the experiment
        :param str|None path_dataset: path to the dataset folder
        :param str|None path_experiment: path to the experiment folder
        :return obj|None:
        """
        assert isinstance(item.get(cls.COL_POINTS_MOVE_WARP, None), str), \
            'Missing registered points in "%s"' % cls.COL_POINTS_MOVE_WARP
        path_points_warp = update_path(item[cls.COL_POINTS_MOVE_WARP], pre_path=path_experiment)
        if not os.path.isfile(path_points_warp):
            logging.warning('missing warped landmarks for: %r', dict(item))
            return

        points_ref, points_move, path_img_ref = cls._load_landmarks(item, path_dataset)

        image_warp = cls._load_warped_image(item, path_experiment)
        points_warp = load_landmarks(path_points_warp)
        if not list(points_warp):
            return
        # draw image with landmarks
        image = draw_image_points(image_warp, points_warp)
        _path = update_path(item[cls.COL_REG_DIR], pre_path=path_experiment)
        save_image(os.path.join(_path, cls.NAME_IMAGE_MOVE_WARP_POINTS), image)
        del image

        # visualise the landmarks move during registration
        image_ref = load_image(path_img_ref)
        fig = draw_images_warped_landmarks(image_ref, image_warp, points_move, points_ref, points_warp)
        del image_ref, image_warp
        return fig
Example #8
    def _extract_warped_image_landmarks(self, record):
        """ get registration results - warped registered images and landmarks

        :param record: {str: value}, dictionary with registration params
        :return (str, str, str, str): paths to the warped images and landmarks
        """
        logging.debug('.. warp the registered image and get landmarks')
        path_dir = self._get_path_reg_dir(record)
        _, path_img_move, _, path_lnds_move = self._get_paths(record)
        path_lnds_warp, path_img_warp = None, None

        # load warped landmarks from TXT
        path_lnds = os.path.join(path_dir, NAME_FILE_LANDMARKS)
        if os.path.isfile(path_lnds):
            points_warp = load_landmarks(path_lnds)
            path_lnds_warp = os.path.join(path_dir, os.path.basename(path_lnds_move))
            save_landmarks(path_lnds_warp, points_warp)
            os.remove(path_lnds)

        path_regist = os.path.join(path_dir, NAME_FILE_IMAGE)
        if os.path.isfile(path_regist):
            name_img_move = os.path.splitext(os.path.basename(path_img_move))[0]
            ext_img_warp = os.path.splitext(NAME_FILE_IMAGE)[-1]
            path_img_warp = os.path.join(path_dir, name_img_move + ext_img_warp)
            os.rename(path_regist, path_img_warp)

        return None, path_img_warp, None, path_lnds_warp
Example #9
def filter_export_landmarks(idx_row, path_output, path_dataset,
                            path_reference):
    """ filter all relevant landmarks which were used and copy them to experiment

    The case is that in a certain challenge stage users provided just a subset
     of all image landmarks, which could also be shuffled. The idea is to identify
     all landmarks the user actually used (provided in the dataset) and filter
     them from the temporary reference dataset.

    :param tuple(idx,dict|Series) idx_row: index and row of the experiment DataFrame
    :param str path_output: path to output folder
    :param str path_dataset: path to provided landmarks
    :param str path_reference: path to the complete landmark collection
    :return tuple(idx,float): record index and match ratio
    """
    idx, row = idx_row

    ratio_matches, lnds_filter_ref, lnds_filter_move = \
        filter_paired_landmarks(row, path_dataset, path_reference,
                                ImRegBenchmark.COL_POINTS_MOVE,
                                ImRegBenchmark.COL_POINTS_REF)

    # moving and reference landmarks
    for col, lnds_flt in [(ImRegBenchmark.COL_POINTS_REF, lnds_filter_ref),
                          (ImRegBenchmark.COL_POINTS_MOVE, lnds_filter_move)]:
        path_out = update_path(row[col], pre_path=path_output)
        create_folder(os.path.dirname(path_out), ok_existing=True)
        if os.path.isfile(path_out):
            assert np.array_equal(load_landmarks(path_out), lnds_flt), \
                'overwrite different set of landmarks'
        save_landmarks(path_out, lnds_flt)

    return idx, ratio_matches
Example #10
    def _extract_warped_image_landmarks(self, item):
        """ get registration results - warped registered images and landmarks

        :param dict item: dictionary {str: value} with registration params
        :return dict: paths to results
        """
        path_dir = self._get_path_reg_dir(item)
        path_im_ref, path_im_move, _, path_lnds_move = self._get_paths(item)
        name_im_move, _ = os.path.splitext(os.path.basename(path_im_move))
        name_lnds_move, _ = os.path.splitext(os.path.basename(path_lnds_move))

        # simplified version of landmarks
        lnds = load_landmarks(path_lnds_move)
        path_lnds_warp = os.path.join(path_dir, name_lnds_move + '.csv')
        # https://github.com/ANTsX/ANTs/issues/733#issuecomment-472049427
        pd.DataFrame(lnds * -1, columns=['x', 'y']).to_csv(path_lnds_warp, index=None)

        # list output transformations
        tf_elast_inv = sorted(glob.glob(os.path.join(path_dir, 'trans*InverseWarp.nii*')))
        tf_elast = [os.path.join(os.path.dirname(p), os.path.basename(p).replace('Inverse', ''))
                    for p in tf_elast_inv]
        tf_affine = sorted(glob.glob(os.path.join(path_dir, 'trans*GenericAffine.mat')))
        # generate commands
        cmd_warp_img = self.COMMAND_WARP_IMAGE % {
            'antsApplyTransforms': self.exec_transf_img,
            'output': path_dir,
            'img_target': item[self.COL_IMAGE_REF_NII],
            'img_source': item[self.COL_IMAGE_MOVE_NII],
            'transfs': ' -t '.join(sorted(tf_affine + tf_elast, reverse=True)),
            'img_name': name_im_move
        }
        cmd_warp_pts = self.COMMAND_WARP_POINTS % {
            'antsApplyTransformsToPoints': self.exec_transf_pts,
            'output': path_dir,
            'path_points': path_lnds_warp,
            'transfs': ' -t '.join(['[ %s , 1]' % tf if 'Affine' in tf else tf
                                    for tf in sorted(tf_affine + tf_elast_inv)]),
            'pts_name': name_lnds_move
        }
        # execute commands
        exec_commands([cmd_warp_img, cmd_warp_pts],
                      path_logger=os.path.join(path_dir, 'warping.log'))

        path_im_nii = os.path.join(path_dir, name_im_move + '.nii')
        if os.path.isfile(path_im_nii):
            path_img_warp = convert_image_from_nifti(path_im_nii)
        else:
            path_img_warp = None

        if os.path.isfile(path_lnds_warp):
            lnds = pd.read_csv(path_lnds_warp, index_col=None).values
            save_landmarks(path_lnds_warp, lnds * -1)
        else:
            path_lnds_warp = None

        return {self.COL_IMAGE_MOVE_WARP: path_img_warp,
                self.COL_POINTS_MOVE_WARP: path_lnds_warp}
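
The '* -1' above follows the convention discussed in the linked ANTs issue: antsApplyTransformsToPoints works in physical (LPS-style) coordinates, so pixel landmarks are negated before the call and negated back afterwards. A self-contained round-trip sketch of just that negation (file name and sample points are illustrative, no ANTs call involved):

# Round-trip of the negation convention used above.
import numpy as np
import pandas as pd

lnds_px = np.array([[10., 20.], [30., 40.]])           # landmarks in pixels
pd.DataFrame(lnds_px * -1, columns=['x', 'y']).to_csv('lnds_negated.csv', index=None)
lnds_back = pd.read_csv('lnds_negated.csv', index_col=None).values * -1
assert np.allclose(lnds_px, lnds_back)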
Example #11
def compute_registration_statistic(idx_row, df_experiments,
                                   path_dataset=None, path_experiment=None):
    """ after successful registration load initial nad estimated landmarks
    afterwords compute various statistic for init, and finalNoBmTemplatene alignment

    :param (int, dict) idx_row: row from the iterated table
    :param DF df_experiments: DataFrame with experiments
    :param str|None path_dataset: path to the dataset folder
    :param str|None path_experiment: path to the experiment folder
    """
    idx, row = idx_row
    row = dict(row)  # convert even series to dictionary
    points_ref, points_move, path_img_ref = _load_landmarks(row, path_dataset)
    img_diag = _image_diag(row, path_img_ref)
    df_experiments.loc[idx, COL_IMAGE_DIAGONAL] = img_diag

    # compute landmarks statistic
    compute_registration_accuracy(df_experiments, idx, points_ref, points_move,
                                  'init', img_diag, wo_affine=False)

    # load transformed landmarks
    if (COL_POINTS_MOVE_WARP not in row) and (COL_POINTS_REF_WARP not in row):
        logging.error('Statistic: no output landmarks')
        return

    # define what is the target and init state according to the experiment results
    is_move_warp = COL_POINTS_MOVE_WARP in row and row[COL_POINTS_MOVE_WARP]
    points_init = points_move if is_move_warp else points_ref
    points_target = points_ref if is_move_warp else points_move
    col_lnds_warp = COL_POINTS_MOVE_WARP if is_move_warp else COL_POINTS_REF_WARP

    # load landmarks
    path_landmarks = update_path_(row[col_lnds_warp], path_experiment)
    if path_landmarks and os.path.isfile(path_landmarks):
        points_warp = load_landmarks(path_landmarks)
    else:
        logging.warning('Invalid path to the landmarks: "%s" <- "%s"',
                        path_landmarks, row[col_lnds_warp])
        return

    # compute Affine statistic
    affine_diff = compute_affine_transf_diff(points_init, points_target, points_warp)
    for name in affine_diff:
        df_experiments.loc[idx, name] = affine_diff[name]

    # compute landmarks statistic
    compute_registration_accuracy(df_experiments, idx, points_target, points_warp,
                                  'elastic', img_diag, wo_affine=True)
    # compute landmarks statistic
    compute_registration_accuracy(df_experiments, idx, points_target, points_warp,
                                  'final', img_diag, wo_affine=False)
    row_ = dict(df_experiments.loc[idx])
    if 'TRE Mean (final)' in row_:
        robust = row_['TRE Mean (final)'] < row_['TRE Mean (init)']
        df_experiments.loc[idx, COL_ROBUSTNESS] = int(robust)
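
A hedged sketch of how this per-row statistic could be driven over a whole experiment table; the CSV names are assumptions and the real benchmark may wire this step differently.

# Hypothetical driver: update df_experiments in place, one record at a time.
import pandas as pd

df_experiments = pd.read_csv('registration-results.csv')   # assumed results table
for idx_row in df_experiments.iterrows():
    compute_registration_statistic(idx_row, df_experiments,
                                    path_dataset='data',
                                    path_experiment='results')
df_experiments.to_csv('registration-results_stat.csv', index=False)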
Example #12
    def _extract_warped_image_landmarks(self, item):
        """ get registration results - warped registered images and landmarks

        :param dict item: dictionary with registration params
        :return dict: paths to warped images/landmarks
        """
        path_reg_dir = self._get_path_reg_dir(item)
        _, path_im_move, path_lnds_ref, _ = self._get_paths(item)
        # convert MHD image
        path_img_ = convert_image_from_mhd(
            os.path.join(path_reg_dir, 'output.mhd'),
            scaling=item.get('scaling', 1.))
        img_name, _ = os.path.splitext(os.path.basename(path_im_move))
        _, img_ext = os.path.splitext(os.path.basename(path_img_))
        path_img_warp = path_img_.replace('output' + img_ext,
                                          img_name + img_ext)
        shutil.move(path_img_, path_img_warp)

        # load transform and warp landmarks
        # lnds_move = load_landmarks(path_lnds_move)
        lnds_ref = load_landmarks(path_lnds_ref)
        lnds_name = os.path.basename(path_lnds_ref)
        path_lnds_warp = os.path.join(path_reg_dir, lnds_name)
        if lnds_ref is None:
            raise ValueError('missing landmarks to be transformed "%s"' %
                             lnds_name)

        # down-scale landmarks if defined
        lnds_ref = lnds_ref / item.get('scaling', 1.)
        # extract deformation
        path_deform_x = os.path.join(path_reg_dir, 'output_x.mhd')
        path_deform_y = os.path.join(path_reg_dir, 'output_y.mhd')
        try:
            shift = self.extract_landmarks_shift_from_mhd(
                path_deform_x, path_deform_y, lnds_ref)
        except Exception:
            logging.exception(path_reg_dir)
            shift = np.zeros(lnds_ref.shape)

        # lnds_warp = lnds_move - shift
        lnds_warp = lnds_ref + shift
        # upscale landmarks if defined
        lnds_warp = lnds_warp * item.get('scaling', 1.)
        save_landmarks(path_lnds_warp, lnds_warp)

        # return formatted results
        return {
            self.COL_IMAGE_MOVE_WARP: path_img_warp,
            self.COL_POINTS_REF_WARP: path_lnds_warp,
        }
Example #13
    def _visual_image_move_warp_lnds_ref_warp(cls,
                                              item,
                                              path_dataset=None,
                                              path_experiment=None):
        """ visualise the case with warped reference landmarks to the move frame

        :param dict item: row with the experiment
        :param str|None path_dataset: path to the dataset folder
        :param str|None path_experiment: path to the experiment folder
        :return obj|None:
        """
        if not isinstance(item.get(cls.COL_POINTS_REF_WARP), str):
            raise ValueError('Missing registered points in "%s"' %
                             cls.COL_POINTS_REF_WARP)
        path_points_warp = update_path(item[cls.COL_POINTS_REF_WARP],
                                       pre_path=path_experiment)
        if not os.path.isfile(path_points_warp):
            logging.warning('missing warped landmarks for: %r', dict(item))
            return

        points_ref, points_move, path_img_ref = cls._load_landmarks(
            item, path_dataset)

        points_warp = load_landmarks(path_points_warp)
        if not list(points_warp):
            return
        # draw image with landmarks
        image_move = load_image(
            update_path(item[cls.COL_IMAGE_MOVE], pre_path=path_dataset))
        image = draw_image_points(image_move, points_warp)
        _path = update_path(item[cls.COL_REG_DIR], pre_path=path_experiment)
        save_image(os.path.join(_path, cls.NAME_IMAGE_REF_POINTS_WARP), image)
        del image

        image_ref = load_image(path_img_ref)
        image_warp = cls._load_warped_image(item, path_experiment)
        image = overlap_two_images(image_ref, image_warp)
        _path = update_path(item[cls.COL_REG_DIR], pre_path=path_experiment)
        save_image(os.path.join(_path, cls.NAME_IMAGE_REF_WARP), image)
        del image, image_warp

        # visualise the landmarks move during registration
        fig = draw_images_warped_landmarks(image_ref, image_move, points_ref,
                                           points_move, points_warp)
        return fig
Example #14
    def _extract_warped_image_landmarks(self, item):
        """ get registration results - warped registered images and landmarks

        :param dict item: dictionary with registration params
        :return dict: paths to warped images/landmarks
        """
        path_dir = self._get_path_reg_dir(item)
        path_img_ref, path_img_move, path_lnds_ref, path_lnds_move = self._get_paths(
            item)
        path_img_warp, path_lnds_warp = None, None
        path_log = os.path.join(path_dir, self.NAME_LOG_REGISTRATION)

        name_lnds = os.path.basename(path_lnds_ref)
        path_lnds_local = save_landmarks_pts(os.path.join(path_dir, name_lnds),
                                             load_landmarks(path_lnds_ref))

        # warping the image and points
        cmd = self.COMMAND_TRANSFORMATION % {
            'exec_transformix': self.exec_transformix,
            'source': path_img_move,
            'output': path_dir,
            'landmarks': path_lnds_local,
        }
        exec_commands(cmd, path_logger=path_log, timeout=self.EXECUTE_TIMEOUT)

        # if there is an output image copy it
        path_im_out = glob.glob(os.path.join(path_dir, self.NAME_IMAGE_WARPED))
        if path_im_out:
            path_im_out = sorted(path_im_out)[0]
            _, ext_img = os.path.splitext(path_im_out)
            name_img, _ = os.path.splitext(os.path.basename(path_img_move))
            path_img_warp = os.path.join(path_dir, name_img + ext_img)
            os.rename(path_im_out, path_img_warp)

        path_lnds_out = os.path.join(path_dir, self.NAME_LNDS_WARPED)
        if os.path.isfile(path_lnds_out):
            path_lnds_warp = os.path.join(path_dir, name_lnds)
            lnds = self.parse_warped_points(path_lnds_out)
            save_landmarks(path_lnds_warp, lnds)

        return {
            self.COL_IMAGE_MOVE_WARP: path_img_warp,
            self.COL_POINTS_REF_WARP: path_lnds_warp
        }
Example #15
    def _extract_warped_image_landmarks(self, item):
        """ get registration results - warped registered images and landmarks

        :param dict item: dictionary with registration params
        :return dict: paths to warped images/landmarks
        """
        path_reg_dir = self._get_path_reg_dir(item)
        _, path_im_move, path_lnds_ref, path_lnds_move = self._get_paths(item)

        path_img_warp = os.path.join(path_reg_dir,
                                     os.path.basename(path_im_move))
        shutil.move(os.path.join(path_reg_dir, 'output.jpeg'), path_img_warp)

        # load transform and warp landmarks
        lnds_ = load_landmarks(path_lnds_ref)
        # this was for the case you run an inverse registration, so you could warp the landmarks directly
        # lnds_ = load_landmarks(path_lnds_move)
        lnds_name = os.path.basename(path_lnds_move)
        path_lnds_warp = os.path.join(path_reg_dir, lnds_name)
        if lnds_ is None:
            raise ValueError('missing landmarks to be transformed "%s"' %
                             lnds_name)

        # extract deformation
        path_deform_x = os.path.join(path_reg_dir, 'output_field_x.nii.gz')
        path_deform_y = os.path.join(path_reg_dir, 'output_field_y.nii.gz')
        try:
            shift = self.extract_landmarks_shift_from_nifty(
                path_deform_x, path_deform_y, lnds_)
        except Exception:
            logging.exception(path_reg_dir)
            shift = np.zeros(lnds_.shape)

        lnds_warp = lnds_ + shift
        save_landmarks(path_lnds_warp, lnds_warp)

        # return formatted results
        return {
            self.COL_IMAGE_MOVE_WARP: path_img_warp,
            self.COL_POINTS_REF_WARP: path_lnds_warp,
        }
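
The helper extract_landmarks_shift_from_nifty (and its MHD counterpart in the earlier example) is not shown here. A minimal sketch of the general idea, sampling two scalar displacement fields at rounded landmark positions with nibabel, assuming landmarks are (x, y) and the fields are indexed [y, x]; the real helper may differ in axis order and interpolation.

# Illustrative only; nearest-neighbour lookup of per-landmark shifts.
import nibabel as nib
import numpy as np

def sample_displacement(path_field_x, path_field_y, landmarks):
    """Return an (N, 2) array of (x, y) shifts for N landmarks."""
    field_x = np.asarray(nib.load(path_field_x).dataobj).squeeze()
    field_y = np.asarray(nib.load(path_field_y).dataobj).squeeze()
    idx = np.round(landmarks).astype(int)
    shift_x = field_x[idx[:, 1], idx[:, 0]]
    shift_y = field_y[idx[:, 1], idx[:, 0]]
    return np.stack([shift_x, shift_y], axis=1)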
Example #16
    def compute_registration_statistic(
        cls,
        idx_row,
        df_experiments,
        path_dataset=None,
        path_experiment=None,
        path_reference=None,
    ):
        """ after successful registration load initial nad estimated landmarks
        afterwords compute various statistic for init, and final alignment

        :param tuple(int,dict) idx_row: row from the iterated table
        :param DF df_experiments: DataFrame with experiments
        :param str|None path_dataset: path to the provided dataset folder
        :param str|None path_reference: path to the complete landmark collection folder
        :param str|None path_experiment: path to the experiment folder
        """
        idx, row = idx_row
        row = dict(row)  # convert even series to dictionary
        # load common landmarks and image size
        points_ref, points_move, path_img_ref = cls._load_landmarks(
            row, path_dataset)
        img_diag = cls._image_diag(row, path_img_ref)
        df_experiments.loc[idx, cls.COL_IMAGE_DIAGONAL] = img_diag

        # compute landmarks statistic
        cls.compute_registration_accuracy(df_experiments,
                                          idx,
                                          points_ref,
                                          points_move,
                                          'init',
                                          img_diag,
                                          wo_affine=False)

        # define what is the target and init state according to the experiment results
        use_move_warp = isinstance(row.get(cls.COL_POINTS_MOVE_WARP), str)
        if use_move_warp:
            points_init, points_target = points_move, points_ref
            col_source, col_target = cls.COL_POINTS_MOVE, cls.COL_POINTS_REF
            col_lnds_warp = cls.COL_POINTS_MOVE_WARP
        else:
            points_init, points_target = points_ref, points_move
            col_lnds_warp = cls.COL_POINTS_REF_WARP
            col_source, col_target = cls.COL_POINTS_REF, cls.COL_POINTS_MOVE

        # optional filtering
        if path_reference:
            ratio, points_target, _ = \
                filter_paired_landmarks(row, path_dataset, path_reference, col_source, col_target)
            df_experiments.loc[idx, COL_PAIRED_LANDMARKS] = np.round(ratio, 2)

        # load transformed landmarks
        if (cls.COL_POINTS_MOVE_WARP not in row) and (cls.COL_POINTS_REF_WARP
                                                      not in row):
            logging.error('Statistic: no output landmarks')
            return

        # check if there are reference landmarks
        if points_target is None:
            logging.warning(
                'Missing landmarks in "%s"',
                cls.COL_POINTS_REF if use_move_warp else cls.COL_POINTS_MOVE)
            return
        # load warped landmarks
        path_lnds_warp = update_path(row[col_lnds_warp],
                                     pre_path=path_experiment)
        if path_lnds_warp and os.path.isfile(path_lnds_warp):
            points_warp = load_landmarks(path_lnds_warp)
            points_warp = np.nan_to_num(points_warp)
        else:
            logging.warning('Invalid path to the landmarks: "%s" <- "%s"',
                            path_lnds_warp, row[col_lnds_warp])
            return
        df_experiments.loc[idx, cls.COL_NB_LANDMARKS_INPUT] = min(
            len(points_init), len(points_target))
        df_experiments.loc[idx, cls.COL_NB_LANDMARKS_WARP] = len(points_warp)

        # compute Affine statistic
        affine_diff = compute_affine_transf_diff(points_init, points_target,
                                                 points_warp)
        for name in affine_diff:
            df_experiments.loc[idx, name] = affine_diff[name]

        # compute landmarks statistic
        cls.compute_registration_accuracy(df_experiments,
                                          idx,
                                          points_target,
                                          points_warp,
                                          'elastic',
                                          img_diag,
                                          wo_affine=True)
        # compute landmarks statistic
        cls.compute_registration_accuracy(df_experiments,
                                          idx,
                                          points_target,
                                          points_warp,
                                          'target',
                                          img_diag,
                                          wo_affine=False)
        row_ = dict(df_experiments.loc[idx])
        # compute the robustness
        if 'TRE Mean' in row_:
            df_experiments.loc[idx, cls.COL_ROBUSTNESS] = \
                compute_tre_robustness(points_target, points_init, points_warp)
Example #17
    def _load_landmarks(cls, item, path_dataset):
        path_img_ref, _, path_lnds_ref, path_lnds_move = \
            [update_path(item[col], pre_path=path_dataset) for col in cls.COVER_COLUMNS]
        points_ref = load_landmarks(path_lnds_ref)
        points_move = load_landmarks(path_lnds_move)
        return points_ref, points_move, path_img_ref
Example #18
    def compute_registration_statistic(cls,
                                       idx_row,
                                       df_experiments,
                                       path_dataset=None,
                                       path_experiment=None):
        """ after successful registration load initial nad estimated landmarks
        afterwords compute various statistic for init, and final alignment

        :param tuple(int,dict) idx_row: row from the iterated table
        :param DF df_experiments: DataFrame with experiments
        :param str|None path_dataset: path to the dataset folder
        :param str|None path_experiment: path to the experiment folder
        """
        idx, row = idx_row
        row = dict(row)  # convert even series to dictionary
        points_ref, points_move, path_img_ref = cls._load_landmarks(
            row, path_dataset)
        img_diag = cls._image_diag(row, path_img_ref)
        df_experiments.loc[idx, cls.COL_IMAGE_DIAGONAL] = img_diag

        # compute landmarks statistic
        cls.compute_registration_accuracy(df_experiments,
                                          idx,
                                          points_ref,
                                          points_move,
                                          'init',
                                          img_diag,
                                          wo_affine=False)

        # load transformed landmarks
        if (cls.COL_POINTS_MOVE_WARP not in row) and (cls.COL_POINTS_REF_WARP
                                                      not in row):
            logging.error('Statistic: no output landmarks')
            return

        # define what is the target and init state according to the experiment results
        is_move_warp = isinstance(row.get(cls.COL_POINTS_MOVE_WARP, None), str)
        points_init = points_move if is_move_warp else points_ref
        points_target = points_ref if is_move_warp else points_move
        col_lnds_warp = cls.COL_POINTS_MOVE_WARP if is_move_warp else cls.COL_POINTS_REF_WARP

        # check if there are reference landmarks
        if points_target is None:
            logging.warning(
                'Missing landmarks in "%s"',
                cls.COL_POINTS_REF if is_move_warp else cls.COL_POINTS_MOVE)
            return
        # load warped landmarks
        path_lnds_warp = update_path(row[col_lnds_warp],
                                     pre_path=path_experiment)
        if path_lnds_warp and os.path.isfile(path_lnds_warp):
            points_warp = load_landmarks(path_lnds_warp)
            points_warp = np.nan_to_num(points_warp)
        else:
            logging.warning('Invalid path to the landmarks: "%s" <- "%s"',
                            path_lnds_warp, row[col_lnds_warp])
            return

        # compute Affine statistic
        affine_diff = compute_affine_transf_diff(points_init, points_target,
                                                 points_warp)
        for name in affine_diff:
            df_experiments.loc[idx, name] = affine_diff[name]

        # compute landmarks statistic
        cls.compute_registration_accuracy(df_experiments,
                                          idx,
                                          points_target,
                                          points_warp,
                                          'elastic',
                                          img_diag,
                                          wo_affine=True)
        # compute landmarks statistic
        cls.compute_registration_accuracy(df_experiments,
                                          idx,
                                          points_target,
                                          points_warp,
                                          'target',
                                          img_diag,
                                          wo_affine=False)
        row_ = dict(df_experiments.loc[idx])
        # compute the robustness
        if 'TRE Mean' in row_:
            df_experiments.loc[idx, cls.COL_ROBUSTNESS] = \
                compute_tre_robustness(points_target, points_init, points_warp)
Example #19
def _load_landmarks(record, path_dataset):
    path_img_ref, _, path_lnds_ref, path_lnds_move = \
        [update_path_(record[col], path_dataset) for col in COVER_COLUMNS]
    points_ref = load_landmarks(path_lnds_ref)
    points_move = load_landmarks(path_lnds_move)
    return points_ref, points_move, path_img_ref
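
The unpacking above implies COVER_COLUMNS is a 4-tuple naming the reference image, moving image, reference landmarks and moving landmarks columns, in that order. The exact column strings live in the benchmark module; a purely illustrative stand-in:

# Assumed shape only; the real constant is defined by the benchmark itself.
COVER_COLUMNS = ('Target image', 'Source image', 'Target landmarks', 'Source landmarks')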