Example #1
def compute_affine_transf_diff(points_ref, points_init, points_est):
    """ compute differences between initial state and estimated results

    :param points_ref: np.array<nb_points, dim>
    :param points_init: np.array<nb_points, dim>
    :param points_est: np.array<nb_points, dim>
    :return dict: differences between the estimated and initial affine components

    >>> points_ref = np.array([[1, 2], [3, 4], [2, 1]])
    >>> points_init = np.array([[3, 4], [1, 2], [2, 1]])
    >>> points_est = np.array([[3, 4], [2, 1], [1, 2]])
    >>> diff = compute_affine_transf_diff(points_ref, points_init, points_est)
    >>> import pandas as pd
    >>> pd.Series(diff).sort_index()  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
    Affine rotation Diff        -8.97...
    Affine scale X Diff         -0.08...
    Affine scale Y Diff         -0.20...
    Affine shear Diff           -1.09...
    Affine translation X Diff   -1.25...
    Affine translation Y Diff    1.25...
    dtype: float64

    Wrong input:
    >>> compute_affine_transf_diff(None, np.array([[1, 2], [3, 4], [2, 1]]), None)
    {}
    """
    # quit early if any of the point sets is missing or empty
    if not all(pts is not None and list(pts)
               for pts in [points_ref, points_init, points_est]):
        return {}

    points_ref = np.nan_to_num(points_ref)
    # estimate the affine transform from reference to initial points
    # and decompose it into interpretable components
    mtx_init = estimate_affine_transform(points_ref,
                                         np.nan_to_num(points_init))[0]
    affine_init = get_affine_components(np.asarray(mtx_init))
    # the same for the estimated (registered) points
    mtx_est = estimate_affine_transform(points_ref,
                                        np.nan_to_num(points_est))[0]
    affine_estim = get_affine_components(np.asarray(mtx_est))

    # per-axis (X/Y) differences for translation and scale
    diff = {
        'Affine %s %s Diff' % (n, c):
        (np.array(affine_estim[n]) - np.array(affine_init[n]))[i]
        for n in ['translation', 'scale'] for i, c in enumerate(['X', 'Y'])
    }
    # angular difference for rotation, normalized in degrees
    diff.update({
        'Affine %s Diff' % n: norm_angle(affine_estim[n] - affine_init[n],
                                         deg=True)
        for n in ['rotation']
    })
    # plain scalar difference for shear
    diff.update({
        'Affine %s Diff' % n: affine_estim[n] - affine_init[n]
        for n in ['shear']
    })
    return diff
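
A minimal usage sketch for the function above, reusing the point sets from its doctest. It assumes compute_affine_transf_diff and its helpers (estimate_affine_transform, get_affine_components, norm_angle) are defined in the surrounding module:

import numpy as np

# point sets copied from the doctest above
points_ref = np.array([[1, 2], [3, 4], [2, 1]])
points_init = np.array([[3, 4], [1, 2], [2, 1]])
points_est = np.array([[3, 4], [2, 1], [1, 2]])

# assumes compute_affine_transf_diff (and its helpers) are in scope
diff = compute_affine_transf_diff(points_ref, points_init, points_est)
for name in sorted(diff):
    print('%s: %.2f' % (name, diff[name]))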
Example #2
def estimate_landmark_outliers(points_0, points_1, std_coef=3):
    """ estimated landmark outliers after affine alignment

    :param ndarray points_0: set of points
    :param ndarray points_1: set of points
    :param float std_coef: range of STD error to be assumed as inlier
    :return ([bool], [float]): vector of binary outliers and the computed errors

    >>> lnds0 = np.array([[4., 116.], [4., 4.], [26., 4.], [26., 116.],
    ...                   [18, 45], [0, 0], [-12, 8], [1, 1]])
    >>> lnds1 = np.array([[61., 56.], [61., -56.], [39., -56.], [39., 56.],
    ...                   [47., -15.], [65., -60.], [77., -52.], [0, 0]])
    >>> out, err = estimate_landmark_outliers(lnds0, lnds1, std_coef=3)
    >>> out.astype(int)
    array([0, 0, 0, 0, 0, 0, 0, 1])
    >>> np.round(err, 2)  # doctest: +NORMALIZE_WHITESPACE
    array([  1.02,  16.78,  10.29,   5.47,   6.88,  18.52,  20.94,  68.96])
    """
    nb = min(len(points_0), len(points_1))
    # warp points_0 onto points_1 via an estimated affine transform
    _, _, points_0w, _ = estimate_affine_transform(points_0[:nb],
                                                   points_1[:nb])
    # Euclidean error of each warped point against its counterpart
    err = np.sqrt(np.sum((points_1[:nb] - points_0w)**2, axis=1))
    # flag points whose error exceeds std_coef times the STD as outliers
    norm = np.std(err) * std_coef
    out = (err > norm)
    return out, err
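
A short sketch of how the outlier mask might be used to drop suspicious landmark pairs before further processing; the landmark arrays come from the doctest, and estimate_affine_transform is assumed to be in scope:

import numpy as np

lnds0 = np.array([[4., 116.], [4., 4.], [26., 4.], [26., 116.],
                  [18, 45], [0, 0], [-12, 8], [1, 1]])
lnds1 = np.array([[61., 56.], [61., -56.], [39., -56.], [39., 56.],
                  [47., -15.], [65., -60.], [77., -52.], [0, 0]])

# assumes estimate_landmark_outliers (and estimate_affine_transform) are in scope
outliers, err = estimate_landmark_outliers(lnds0, lnds1, std_coef=3)
lnds0_clean, lnds1_clean = lnds0[~outliers], lnds1[~outliers]
print('dropped %i of %i landmark pairs' % (outliers.sum(), len(outliers)))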
Example #3
def compute_registration_accuracy(df_experiments, idx, points1, points2,
                                  state='', img_diag=None, wo_affine=False):
    """ compute statistic on two points sets

    :param DF df_experiments: DataFrame with experiments
    :param int idx: index of the particular record
    :param points1: np.array<nb_points, dim>
    :param points2: np.array<nb_points, dim>
    :param str state: whether it was before or after registration
    :param float img_diag: target image diagonal
    :param bool wo_affine: without affine transform, assume only local/elastic deformation
    """
    if wo_affine and points1 is not None and points2 is not None:
        # remove the affine transform and assume only local/elastic deformation
        _, _, points1, _ = estimate_affine_transform(points1, points2)

    _, stat = compute_points_dist_statistic(points1, points2)
    if img_diag is not None:
        df_experiments.at[idx, COL_IMAGE_DIAGONAL] = img_diag
    # update the particular record at idx
    for name in (n for n in stat if n not in ['overlap points']):
        if img_diag is not None:
            df_experiments.at[idx, 'rTRE %s (%s)' % (name, state)] = stat[name] / img_diag
        df_experiments.at[idx, 'TRE %s (%s)' % (name, state)] = stat[name]
    for name in ['overlap points']:
        df_experiments.at[idx, '%s (%s)' % (name, state)] = stat[name]
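
A hypothetical call sketch with a one-row DataFrame and made-up landmark pairs; COL_IMAGE_DIAGONAL and compute_points_dist_statistic are assumed to be provided by the surrounding module:

import numpy as np
import pandas as pd

# toy experiment table and landmark pairs, made up for illustration
df_experiments = pd.DataFrame({'method': ['demo']})
points_target = np.array([[10., 20.], [30., 40.], [25., 15.]])
points_warped = np.array([[12., 19.], [28., 42.], [26., 17.]])

# fills the 'TRE ... (init)' and 'rTRE ... (init)' columns for record 0
compute_registration_accuracy(df_experiments, idx=0,
                              points1=points_target, points2=points_warped,
                              state='init', img_diag=np.hypot(800, 600))
print(df_experiments.iloc[0])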
Example #4
    def compute_registration_accuracy(
        cls,
        df_experiments,
        idx,
        points1,
        points2,
        state='',
        img_diag=None,
        wo_affine=False,
    ):
        """ compute statistic on two points sets

        IRE - Initial Registration Error
        TRE - Target Registration Error

        :param DF df_experiments: DataFrame with experiments
        :param int idx: index of the particular record
        :param ndarray points1: np.array<nb_points, dim>
        :param ndarray points2: np.array<nb_points, dim>
        :param str state: whether it was before or after registration
        :param float img_diag: target image diagonal
        :param bool wo_affine: without affine transform, assume only local/elastic deformation
        """
        if wo_affine and points1 is not None and points2 is not None:
            # remove the affine transform and assume only local/elastic deformation
            _, _, points1, _ = estimate_affine_transform(points1, points2)

        _, stats = compute_target_regist_error_statistic(points1, points2)
        if img_diag is not None:
            df_experiments.at[idx, cls.COL_IMAGE_DIAGONAL] = img_diag
        # update particular idx
        for n_stat in (n for n in stats if n not in ['overlap points']):
            # if it is not one of the simplified states
            if state and state not in ('init', 'final', 'target'):
                name = 'TRE %s (%s)' % (n_stat, state)
            else:
                # for the initial state use IRE, otherwise TRE
                name = '%s %s' % ('IRE' if state == 'init' else 'TRE', n_stat)
            if img_diag is not None:
                df_experiments.at[idx, 'r%s' % name] = stats[n_stat] / img_diag
            df_experiments.at[idx, name] = stats[n_stat]
        for n_stat in ['overlap points']:
            df_experiments.at[idx, '%s (%s)' % (n_stat, state)] = stats[n_stat]
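
The IRE/TRE column naming in the loop above is the main difference from Example #3; this standalone sketch mirrors that rule with a hypothetical helper so the three cases are easy to compare:

def _column_name(n_stat, state):
    # hypothetical helper mirroring the naming rule above
    if state and state not in ('init', 'final', 'target'):
        # verbose form for any state outside the simplified trio
        return 'TRE %s (%s)' % (n_stat, state)
    # the initial state reports IRE, any other simplified state reports TRE
    return '%s %s' % ('IRE' if state == 'init' else 'TRE', n_stat)

for state in ('init', 'final', 'elastic'):
    print(state, '->', _column_name('Mean', state))
# init -> IRE Mean, final -> TRE Mean, elastic -> TRE Mean (elastic)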
Example #5
def expand_random_warped_landmarks(names_lnds, names_lnds_new, nb_total):
    """ add some extra point which are randomly sampled in the first sample
    and warped to the other images using estimated affine transform

    :param dict names_lnds: mapping to ndarray of the original landmarks
    :param dict names_lnds_new: mapping to ndarray of the generated landmarks
    :param int nb_total: total number of points (landmarks)
    :return dict: mapping to ndarray
    """
    # estimate the number of required points
    nb_min_new = min(map(len, names_lnds_new.values()))
    nb_extras = nb_total - nb_min_new
    if nb_extras <= 0:
        return names_lnds_new

    ref_name = sorted(names_lnds)[0]
    ref_points = names_lnds[ref_name]
    points_extra = generate_random_points_inside(ref_points, nb_extras)

    for name in filter(lambda n: n != ref_name, names_lnds):
        # prepare the points
        nb_common = min([len(names_lnds[ref_name]), len(names_lnds[name])])
        pts1 = names_lnds[ref_name][:nb_common]
        pts2 = names_lnds[name][:nb_common]
        # estimate the internal affine transformation
        matrix, _, _, _ = estimate_affine_transform(pts1, pts2)
        points_warp = transform_points(points_extra, matrix)
        # insert the warped points
        names_lnds_new[name] = np.vstack(
            [names_lnds_new[name][:nb_min_new], points_warp])
    # insert also the reference sample
    names_lnds_new[ref_name] = np.vstack(
        [names_lnds_new[ref_name][:nb_min_new], points_extra])

    # reorder landmarks but equally in all sets
    reorder = list(range(nb_total))
    np.random.shuffle(reorder)
    names_lnds_new = {n: names_lnds_new[n][reorder] for n in names_lnds_new}
    return names_lnds_new
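
A usage sketch with two toy landmark sets; generate_random_points_inside, estimate_affine_transform, and transform_points are assumed to be available from the surrounding module:

import numpy as np

# two toy images with four corresponding landmarks each
names_lnds = {'img-A': np.array([[1., 1.], [9., 2.], [5., 8.], [2., 6.]]),
              'img-B': np.array([[2., 1.], [10., 3.], [6., 9.], [3., 7.]])}
# pretend an earlier generator already produced three new landmarks per image
names_lnds_new = {n: pts[:3].copy() for n, pts in names_lnds.items()}

# assumes expand_random_warped_landmarks and its helpers are in scope
expanded = expand_random_warped_landmarks(names_lnds, names_lnds_new,
                                          nb_total=10)
assert all(len(pts) == 10 for pts in expanded.values())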