Example #1
def plot_keypoint_scales(hs, fnum=1):
    print('[dev] plot_keypoint_scales()')
    cx2_kpts = hs.feats.cx2_kpts
    if len(cx2_kpts) == 0:
        hs.refresh_features()
        cx2_kpts = hs.feats.cx2_kpts
    cx2_nFeats = list(map(len, cx2_kpts))
    kpts = np.vstack(cx2_kpts)
    print('[dev] --- LaTeX --- ')
    _printopts = np.get_printoptions()
    np.set_printoptions(precision=3)
    print(util_latex.latex_scalar(r'\# keypoints, ', len(kpts)))
    print(util_latex.latex_mystats(r'\# keypoints per image', cx2_nFeats))
    scales = ktool.get_scales(kpts)
    scales = np.array(sorted(scales))
    print(util_latex.latex_mystats(r'keypoint scale', scales))
    np.set_printoptions(**_printopts)
    print('[dev] ---/LaTeX --- ')
    #
    df2.figure(fnum=fnum, docla=True, title='sorted scales')
    df2.plot(scales)
    df2.adjust_subplots_safe()
    #ax = df2.gca()
    #ax.set_yscale('log')
    #ax.set_xscale('log')
    #
    fnum += 1
    df2.figure(fnum=fnum, docla=True, title='hist scales')
    df2.show_histogram(scales, bins=20)
    df2.adjust_subplots_safe()
    #ax = df2.gca()
    #ax.set_yscale('log')
    #ax.set_xscale('log')
    return fnum
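
The df2 plotting and util_latex helpers above come from the surrounding codebase. As a reference point, here is a minimal sketch of the same two plots using only NumPy and matplotlib. It assumes the usual vtool keypoint layout of (x, y, a, c, d, theta) rows, where [[a, 0], [c, d]] is a lower-triangular shape matrix, and it assumes ktool.get_scales is equivalent to sqrt(a * d); both conventions are assumptions not shown in this snippet.

import numpy as np
import matplotlib.pyplot as plt


def plot_scales_minimal(kpts, bins=20):
    # kpts is assumed to be an (N, 6) array of (x, y, a, c, d, theta) rows,
    # where [[a, 0], [c, d]] is the lower-triangular shape of each keypoint.
    a = kpts[:, 2]
    d = kpts[:, 4]
    # Scale taken as the square root of the shape determinant, sqrt(a * d),
    # which is assumed to match what ktool.get_scales returns.
    scales = np.sort(np.sqrt(a * d))
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
    ax1.plot(scales)
    ax1.set_title('sorted scales')
    ax2.hist(scales, bins=bins)
    ax2.set_title('hist scales')
    plt.show()
    return scales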
Example #2
def get_keypoint_stats(ibs):
    from utool import util_latex as pytex
    #from hsdev import dev_consistency
    #dev_consistency.check_keypoint_consistency(ibs)
    # Keypoint stats
    #ibs.refresh_features()
    from ibeis.control.IBEISControl import IBEISController
    assert isinstance(ibs, IBEISController)
    valid_aids = np.array(ibs.get_valid_aids())
    cx2_kpts = ibs.get_annot_kpts(valid_aids)
    #cx2_kpts = ibs.feats.cx2_kpts
    # Check cx2_kpts
    cx2_nFeats = list(map(len, cx2_kpts))
    kpts = np.vstack(cx2_kpts)
    print('[dbinfo] --- LaTeX --- ')
    _printopts = np.get_printoptions()
    np.set_printoptions(precision=3)
    scales = ktool.get_scales(kpts)
    scales = np.array(sorted(scales))
    tex_scale_stats = pytex.latex_mystats(r'kpt scale', scales)
    tex_nKpts       = pytex.latex_scalar(r'\# kpts', len(kpts))
    tex_kpts_stats  = pytex.latex_mystats(r'\# kpts/chip', cx2_nFeats)
    print(tex_nKpts)
    print(tex_kpts_stats)
    print(tex_scale_stats)
    np.set_printoptions(**_printopts)
    print('[dbinfo] ---/LaTeX --- ')
    return (tex_nKpts, tex_kpts_stats, tex_scale_stats)
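
The exact formatting done by utool's latex_mystats and latex_scalar is not shown here. The stand-in below is only a sketch of the kind of summary being printed (count, mean, standard deviation, min, max rendered as one LaTeX-ish line); latex_stats_sketch is a hypothetical helper, not the utool API.

import numpy as np


def latex_stats_sketch(label, data):
    # Hypothetical stand-in for utool's latex_mystats: summarize a 1-D
    # sequence and format it as a single LaTeX-ish line.
    arr = np.asarray(data, dtype=float)
    stats = (label, len(arr), arr.mean(), arr.std(), arr.min(), arr.max())
    return r'%s: $n=%d,\ \mu=%.3f,\ \sigma=%.3f,\ \min=%.3f,\ \max=%.3f$' % stats

# Example usage with made-up scale values:
# print(latex_stats_sketch('kpt scale', [1.5, 2.0, 3.2, 4.8]))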
Example #3
def kp_info(kp):
    kpts = np.array([kp])
    xy_str = ktool.get_xy_strs(kpts)[0]
    shape_str = ktool.get_shape_strs(kpts)[0]
    ori_ = ktool.get_oris(kpts)[0]
    ori_str = 'ori=%.2f' % ori_
    scale = ktool.get_scales(kpts)[0]
    return xy_str, shape_str, scale, ori_str
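
A small usage sketch for kp_info, assuming the 6-element (x, y, a, c, d, theta) keypoint layout and that ktool is vtool's keypoint module; the dummy values below are made up for illustration.

import numpy as np

# One dummy keypoint in the assumed (x, y, a, c, d, theta) layout:
# centered at (30, 40), roughly circular shape of radius 5, orientation 0.
kp = np.array([30.0, 40.0, 5.0, 0.0, 5.0, 0.0])
xy_str, shape_str, scale, ori_str = kp_info(kp)
print(xy_str, shape_str, scale, ori_str)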
Example #4
    def show_kpts_(fnum, pnum, kpts, vecs, title):
        print('--------')
        print('show_kpts: %r.%r' % (fnum, pnum))
        print('kpts  = %r' % (kpts,))
        print('scales = %r' % ktool.get_scales(kpts))
        # FIXME: this exists in ibeis. move to vtool
        #dev_consistency.check_vecs(vecs3)

        show_keypoints(imgL, kpts, sifts=vecs, pnum=pnum, rect=True,
                       ori=True, fnum=fnum, title=title, ell_alpha=1)
Example #5
    def measure_feat_pairs(allres, orgtype='top_true'):
        print('Measure ' + orgtype + ' pairs')
        orgres = allres.__dict__[orgtype]
        entropy_list = []
        scale_list = []
        score_list = []
        lbl = 'Measuring ' + orgtype + ' pair '
        fmt_str = utool.make_progress_fmt_str(len(orgres), lbl)
        rank_skips = []
        gt_skips = []
        for ix, (qrid, rid, score, rank) in enumerate(orgres.iter()):
            utool.print_(fmt_str % (ix + 1, ))
            # Skip low ranks
            if rank > 5:
                rank_skips.append(qrid)
                continue
            other_rids = ibs.get_other_indexed_rids(qrid)
            # Skip no groundtruth
            if len(other_rids) == 0:
                gt_skips.append(qrid)
                continue
            qres = qrid2_qres[qrid]
            # Get matching feature indexes
            fm = qres.cx2_fm[rid]
            # Get their scores
            fs = qres.cx2_fs[rid]
            # Get matching descriptors
            printDBG('\nfm.shape=%r' % (fm.shape, ))
            desc1 = cx2_desc[qrid][fm[:, 0]]
            desc2 = cx2_desc[rid][fm[:, 1]]
            # Get matching keypoints
            kpts1 = cx2_kpts[qrid][fm[:, 0]]
            kpts2 = cx2_kpts[rid][fm[:, 1]]
            # Get their scale
            scale1_m = ktool.get_scales(kpts1)
            scale2_m = ktool.get_scales(kpts2)
            # Get their entropy
            entropy1 = descriptor_entropy(desc1, bw_factor=1)
            entropy2 = descriptor_entropy(desc2, bw_factor=1)
            # Append to results
            entropy_tup = np.array(list(zip(entropy1, entropy2)))
            scale_tup = np.array(list(zip(scale1_m, scale2_m)))
            entropy_tup = entropy_tup.reshape(len(entropy_tup), 2)
            scale_tup = scale_tup.reshape(len(scale_tup), 2)
            entropy_list.append(entropy_tup)
            scale_list.append(scale_tup)
            score_list.append(fs)
        print('Skipped %d total.' % (len(rank_skips) + len(gt_skips), ))
        print('Skipped %d for rank > 5, %d for no gt' % (
            len(rank_skips),
            len(gt_skips),
        ))
        print(np.unique(list(map(len, entropy_list))))

        def evstack(tup):
            return np.vstack(tup) if len(tup) > 0 else np.empty((0, 2))

        def ehstack(tup):
            return np.hstack(tup) if len(tup) > 0 else np.empty((0, 2))

        entropy_pairs = evstack(entropy_list)
        scale_pairs = evstack(scale_list)
        scores = ehstack(score_list)
        print('\n * Measured %d pairs' % len(entropy_pairs))
        return entropy_pairs, scale_pairs, scores
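
descriptor_entropy is defined elsewhere in this module. As a rough illustration of the quantity being paired with keypoint scales here, the sketch below computes a per-descriptor Shannon entropy by histogramming each descriptor's values; descriptor_entropy_sketch is hypothetical and ignores the bw_factor bandwidth parameter used above.

import numpy as np


def descriptor_entropy_sketch(descs, num_bins=8, eps=1e-9):
    # Hypothetical per-descriptor Shannon entropy: histogram each descriptor's
    # values into num_bins bins, normalize to a probability distribution, and
    # compute -sum(p * log2(p)). An illustration only, not the original
    # descriptor_entropy implementation.
    descs = np.asarray(descs, dtype=float)
    entropies = np.empty(len(descs))
    for i, desc in enumerate(descs):
        hist, _ = np.histogram(desc, bins=num_bins)
        p = hist / (hist.sum() + eps)
        entropies[i] = -np.sum(p * np.log2(p + eps))
    return entropies

# e.g. on random uint8 SIFT-like descriptors:
# descs = np.random.randint(0, 256, size=(10, 128))
# print(descriptor_entropy_sketch(descs))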
Example #6
def test_homog_errors(H,
                      kpts1,
                      kpts2,
                      fm,
                      xy_thresh_sqrd,
                      scale_thresh,
                      ori_thresh,
                      full_homog_checks=True):
    r"""
    Test to see which keypoints the homography correctly maps

    Args:
        H (ndarray[float64_t, ndim=2]):  homography/perspective matrix
        kpts1 (ndarray[float32_t, ndim=2]):  keypoints
        kpts2 (ndarray[float32_t, ndim=2]):  keypoints
        fm (list):  list of feature matches as tuples (qfx, dfx)
        xy_thresh_sqrd (float):
        scale_thresh (float):
        ori_thresh (float):  angle in radians
        full_homog_checks (bool):

    Returns:
        tuple: homog_tup1

    CommandLine:
        python -m vtool.spatial_verification --test-test_homog_errors:0 --show
        python -m vtool.spatial_verification --test-test_homog_errors:0 --show --rotation_invariance
        python -m vtool.spatial_verification --test-test_homog_errors:0 --show --rotation_invariance --no-affine-invariance --xy-thresh=.001
        python -m vtool.spatial_verification --test-test_homog_errors:0 --show --rotation_invariance --no-affine-invariance --xy-thresh=.001 --no-full-homog-checks
        python -m vtool.spatial_verification --test-test_homog_errors:0 --show --no-full-homog-checks
        # --------------
        # Shows (sorta) how inliers are computed
        python -m vtool.spatial_verification --test-test_homog_errors:1 --show
        python -m vtool.spatial_verification --test-test_homog_errors:1 --show --rotation_invariance
        python -m vtool.spatial_verification --test-test_homog_errors:1 --show --rotation_invariance --no-affine-invariance --xy-thresh=.001
        python -m vtool.spatial_verification --test-test_homog_errors:1 --show --rotation_invariance --xy-thresh=.001
        python -m vtool.spatial_verification --test-test_homog_errors:0 --show --rotation_invariance --xy-thresh=.001

    Example0:
        >>> # DISABLE_DOCTEST
        >>> from vtool.spatial_verification import *  # NOQA
        >>> import plottool as pt
        >>> kpts1, kpts2, fm, aff_inliers, rchip1, rchip2, xy_thresh_sqrd = testdata_matching_affine_inliers()
        >>> H = estimate_refined_transform(kpts1, kpts2, fm, aff_inliers)
        >>> scale_thresh, ori_thresh = 2.0, 1.57
        >>> full_homog_checks = not ut.get_argflag('--no-full-homog-checks')
        >>> homog_tup1 = test_homog_errors(H, kpts1, kpts2, fm, xy_thresh_sqrd, scale_thresh, ori_thresh, full_homog_checks)
        >>> homog_tup = (homog_tup1[0], homog_tup1[2])
        >>> ut.quit_if_noshow()
        >>> pt.draw_sv.show_sv(rchip1, rchip2, kpts1, kpts2, fm, homog_tup=homog_tup)
        >>> ut.show_if_requested()

    Example1:
        >>> # DISABLE_DOCTEST
        >>> from vtool.spatial_verification import *  # NOQA
        >>> import plottool as pt
        >>> kpts1, kpts2, fm_, aff_inliers, rchip1, rchip2, xy_thresh_sqrd = testdata_matching_affine_inliers()
        >>> H = estimate_refined_transform(kpts1, kpts2, fm_, aff_inliers)
        >>> scale_thresh, ori_thresh = 2.0, 1.57
        >>> full_homog_checks = not ut.get_argflag('--no-full-homog-checks')
        >>> # ----------------
        >>> # Take subset of feature matches
        >>> fm = fm_
        >>> scale_err, xy_err, ori_err = \
        ...     ut.exec_func_src(test_homog_errors, globals(), locals(),
        ...     'scale_err, xy_err, ori_err'.split(', '))
        >>> # We only care about scale and orientation here; ignore bad xy points
        >>> xy_inliers_flag = np.less(xy_err, xy_thresh_sqrd)
        >>> scale_err[~xy_inliers_flag] = 0
        >>> # filter
        >>> fm = fm_[np.array(scale_err).argsort()[::-1][:10]]
        >>> fm = fm_[np.array(scale_err).argsort()[::-1][:10]]
        >>> # Exec sourcecode
        >>> kpts1_m, kpts2_m, off_xy1_m, off_xy1_mt, dxy1_m, dxy1_mt, xy2_m, xy1_m, xy1_mt, scale_err, xy_err, ori_err = \
        ...     ut.exec_func_src(test_homog_errors, globals(), locals(),
        ...     'kpts1_m, kpts2_m, off_xy1_m, off_xy1_mt, dxy1_m, dxy1_mt, xy2_m, xy1_m, xy1_mt, scale_err, xy_err, ori_err'.split(', '))
        >>> #---------------
        >>> ut.quit_if_noshow()
        >>> pt.figure(fnum=1, pnum=(1, 2, 1), title='orig points and offset point')
        >>> segments_list1 = np.array(list(zip(xy1_m.T.tolist(), off_xy1_m.T.tolist())))
        >>> pt.draw_line_segments(segments_list1, color=pt.LIGHT_BLUE)
        >>> pt.dark_background()
        >>> #---------------
        >>> pt.figure(fnum=1, pnum=(1, 2, 2), title='transformed points and matching points')
        >>> #---------------
        >>> # first have to make corresponding offset points
        >>> # Use reference point for scale and orientation tests
        >>> oris2_m   = ktool.get_oris(kpts2_m)
        >>> scales2_m = ktool.get_scales(kpts2_m)
        >>> dxy2_m    = np.vstack((np.sin(oris2_m), -np.cos(oris2_m)))
        >>> scaled_dxy2_m = dxy2_m * scales2_m[None, :]
        >>> off_xy2_m = xy2_m + scaled_dxy2_m
        >>> # Draw transformed segments
        >>> segments_list2 = np.array(list(zip(xy2_m.T.tolist(), off_xy2_m.T.tolist())))
        >>> pt.draw_line_segments(segments_list2, color=pt.GREEN)
        >>> # Draw corresponding match segments
        >>> segments_list3 = np.array(list(zip(xy1_mt.T.tolist(), off_xy1_mt.T.tolist())))
        >>> pt.draw_line_segments(segments_list3, color=pt.RED)
        >>> # Draw matches between correspondences
        >>> segments_list4 = np.array(list(zip(xy1_mt.T.tolist(), xy2_m.T.tolist())))
        >>> pt.draw_line_segments(segments_list4, color=pt.ORANGE)
        >>> pt.dark_background()
        >>> #---------------
        >>> #vt.get _xy_axis_extents(kpts1_m)
        >>> #pt.draw_sv.show_sv(rchip1, rchip2, kpts1, kpts2, fm, homog_tup=homog_tup)
        >>> ut.show_if_requested()
    """
    kpts1_m = kpts1.take(fm.T[0], axis=0)
    kpts2_m = kpts2.take(fm.T[1], axis=0)
    # Transform all xy1 matches to xy2 space
    xy1_m = ktool.get_xys(kpts1_m)
    #with ut.embed_on_exception_context:
    xy1_mt = ltool.transform_points_with_homography(H, xy1_m)
    #xy1_mt  = ktool.transform_kpts_xys(H, kpts1_m)
    xy2_m = ktool.get_xys(kpts2_m)
    # --- Find (Squared) Homography Distance Error ---
    # You cannot easily test for scale or orientation here because a
    # projective transformation does not preserve the keypoint ellipse
    # (it generally maps to some other conic).
    xy_err = dtool.L2_sqrd(xy1_mt.T, xy2_m.T)
    # Estimate final inliers
    #ut.embed()
    if full_homog_checks:
        # TODO: may need to use more than one reference point
        # Use reference point for scale and orientation tests
        oris1_m = ktool.get_oris(kpts1_m)
        scales1_m = ktool.get_scales(kpts1_m)
        # Get point offsets with unit length
        dxy1_m = np.vstack((np.sin(oris1_m), -np.cos(oris1_m)))
        scaled_dxy1_m = dxy1_m * scales1_m[None, :]
        off_xy1_m = xy1_m + scaled_dxy1_m
        # transform reference point
        off_xy1_mt = ltool.transform_points_with_homography(H, off_xy1_m)
        scaled_dxy1_mt = xy1_mt - off_xy1_mt
        scales1_mt = npl.norm(scaled_dxy1_mt, axis=0)
        #with warnings.catch_warnings():
        #    warnings.simplefilter("ignore")
        dxy1_mt = scaled_dxy1_mt / scales1_mt
        # adjust for gravity vector being 0
        oris1_mt = np.arctan2(dxy1_mt[1], dxy1_mt[0]) - ktool.GRAVITY_THETA
        _det1_mt = scales1_mt**2
        det2_m = ktool.get_sqrd_scales(kpts2_m)
        ori2_m = ktool.get_oris(kpts2_m)
        #xy_err    = dtool.L2_sqrd(xy2_m.T, _xy1_mt.T)
        scale_err = dtool.det_distance(_det1_mt, det2_m)
        ori_err = dtool.ori_distance(oris1_mt, ori2_m)
        ###
        xy_inliers_flag = np.less(xy_err, xy_thresh_sqrd)
        scale_inliers_flag = np.less(scale_err, scale_thresh)
        ori_inliers_flag = np.less(ori_err, ori_thresh)
        hypo_inliers_flag = xy_inliers_flag  # Try to re-use memory
        np.logical_and(hypo_inliers_flag,
                       ori_inliers_flag,
                       out=hypo_inliers_flag)
        np.logical_and(hypo_inliers_flag,
                       scale_inliers_flag,
                       out=hypo_inliers_flag)
        # Seems slower due to memory
        #hypo_inliers_flag = np.logical_and.reduce(
        #    [xy_inliers_flag, ori_inliers_flag, scale_inliers_flag])
        # this is also slower
        #hypo_inliers_flag = np.logical_and.reduce((xy_inliers_flag,
        #ori_inliers_flag, scale_inliers_flag), out=xy_inliers_flag)
        refined_inliers = np.where(hypo_inliers_flag)[0].astype(INDEX_DTYPE)
        refined_errors = (xy_err, ori_err, scale_err)
    else:
        refined_inliers = np.where(
            xy_err < xy_thresh_sqrd)[0].astype(INDEX_DTYPE)
        refined_errors = (xy_err, None, None)
    homog_tup1 = (refined_inliers, refined_errors, H)
    return homog_tup1
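
The core of the xy check above is mapping the matched xy1 points through H in homogeneous coordinates and comparing against xy2 with a squared Euclidean distance. The sketch below shows just that step with plain NumPy; it mirrors what ltool.transform_points_with_homography and dtool.L2_sqrd are used for, but it is not the vtool implementation.

import numpy as np


def homog_xy_err_sketch(H, xy1, xy2):
    # Map xy1 (2 x N) through the homography H in homogeneous coordinates,
    # divide by the projective coordinate, and return the squared Euclidean
    # distance to xy2 (2 x N) for each matched point.
    ones = np.ones((1, xy1.shape[1]))
    xyz1 = np.vstack((xy1, ones))               # 3 x N homogeneous points
    xyz1_t = H.dot(xyz1)                        # transformed homogeneous points
    xy1_t = xyz1_t[0:2] / xyz1_t[2][None, :]    # back to inhomogeneous 2 x N
    return ((xy1_t - xy2) ** 2).sum(axis=0)     # squared xy error per match

# An identity homography maps points onto themselves, so the error is zero:
# xy = np.array([[10.0, 20.0], [30.0, 40.0]])  # 2 x N, columns are points
# print(homog_xy_err_sketch(np.eye(3), xy, xy))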