Example #1
def dump_matching_result(fn2, testset_full_path):
    # Dump the matching results to external files
    # One set of data is written per input file
    fn, ext = os.path.splitext(fn2)
    testcase_full_path = os.path.join(testset_full_path, fn2)
    imgT = cv2.imread(testcase_full_path, 0)
    if imgT is None:
        logger.info('Failed to load fn2: {}'.format(testcase_full_path))
        raise ValueError('File not found')
    logger.info("Using Training: {}".format(fn2))

    pool = ThreadPool(processes=cv2.getNumberOfCPUs())
    with Timer('Detection'):
        kpT, descT = spla.affine_detect(detector,
                                        imgT,
                                        pool=pool,
                                        simu_param='test')
    logger.info('imgQ - %d features, imgT - %d features' %
                (spla.count_keypoints(splt_kpQ), len(kpT)))

    with Timer('matching'):
        mesh_pQ, mesh_pT, mesh_pairs = spla.match_with_cross(
            matcher, splt_descQ, splt_kpQ, descT, kpT)

    index_mesh_pairs = format4pickle_pairs(mesh_pairs)
    import joblib
    dump_match_testcase_dir = myfsys.setup_output_directory(dump_match_dir, fn)
    joblib.dump(mesh_pQ,
                os.path.join(dump_match_testcase_dir, 'mesH_pQ.pikle'),
                compress=True)
    joblib.dump(mesh_pT,
                os.path.join(dump_match_testcase_dir, 'mesH_pT.pikle'),
                compress=True)
    import pickle
    with open(os.path.join(dump_match_testcase_dir, 'mesh_pairs.pickle'),
              'wb') as f:
        pickle.dump(index_mesh_pairs, f)
    # for i, mesh_pair in enumerate(index_mesh_pairs):
    #     joblib.dump(mesh_pair, os.path.join(dump_detected_testcase_dir, "mesh_pairs_{0:02d}.pikle".format(i)),
    #                 compress=True)

    with Timer('estimation'):
        Hs, statuses, pairs = spla.calclate_Homography4splitmesh(mesh_pQ,
                                                                 mesh_pT,
                                                                 mesh_pairs,
                                                                 median=median)
    joblib.dump(Hs,
                os.path.join(dump_match_testcase_dir, 'Hs.pikle'),
                compress=True)
    joblib.dump(statuses,
                os.path.join(dump_match_testcase_dir, 'statuses.pikle'),
                compress=True)
    index_pairs = tuple(
        tuple((p.pt, p.size, p.angle, p.response, p.octave, p.class_id)
              for p in pair) for pair in pairs)
    with open(os.path.join(dump_match_testcase_dir, 'pairs.pickle'),
              'wb') as f:
        pickle.dump(index_pairs, f)
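A minimal driver sketch for dump_matching_result follows. It only assumes what the example itself assumes (module-level detector, matcher, splt_kpQ, splt_descQ, and dump_match_dir); the helper name dump_all_testcases and the image-extension filter are illustrative, not part of the original code.

import os

def dump_all_testcases(testset_full_path):
    # Call dump_matching_result for every image file in the test set.
    # dump_matching_result raises ValueError when cv2.imread cannot load
    # the file, so unreadable entries are simply skipped here.
    for fn2 in sorted(os.listdir(testset_full_path)):
        if not fn2.lower().endswith(('.png', '.jpg', '.jpeg', '.bmp')):
            continue
        try:
            dump_matching_result(fn2, testset_full_path)
        except ValueError:
            continue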
Example #2
def detect_and_match(detector, matcher, set_fn, splt_num=64, simu_type="default"):
    """
    SplitA実験
    set_fn:
    """
    fnQ, testcase, fnT = set_fn
    def get_expt_names():
        tmpf, tmpext = os.path.splitext(fnT)
        return (os.path.basename(__file__), testcase, tmpf)
    expt_names = get_expt_names()
    logger = setup(expt_names)
    logger.info(__doc__)

    full_fnQ = myfsys.getf_template((fnQ,))
    full_fnT = myfsys.getf_input(testcase, fnT)
    imgQ, imgT = read_images(full_fnQ, full_fnT, logger)

    pool = ThreadPool(processes=cv2.getNumberOfCPUs())
    with Timer('Detection with SPLIT-ASIFT', logger):
        splt_kpQ, splt_descQ = spltA.affine_detect_into_mesh(detector, splt_num, imgQ, simu_param=simu_type)
    with Timer('Detection with SIFT', logger):
        kpT, descT = affine_detect(detector, imgT, pool=pool, simu_param='test')
    logger.info('imgQ - {0} features, imgT - {1} features'.format(spltA.count_keypoints(splt_kpQ), len(kpT)))

    with Timer('matching', logger):
        mesh_pQ, mesh_pT, mesh_pairs = spltA.match_with_cross(matcher, splt_descQ, splt_kpQ, descT, kpT)

    Hs = []
    statuses = []
    kp_pairs_long = []
    Hs_stable = []
    kp_pairs_long_stable = []
    for pQ, pT, pairs in zip(mesh_pQ, mesh_pT, mesh_pairs):
        pairs, H, status = calclate_Homography(pQ, pT, pairs)
        Hs.append(H)
        statuses.append(status)
        is_stable = (status is not None and len(status) > 0
                     and np.sum(status) / len(status) >= 0.4)
        if is_stable:
            Hs_stable.append(H)
        else:
            Hs_stable.append(None)
        for p in pairs:
            kp_pairs_long.append(p)
            if is_stable:
                kp_pairs_long_stable.append(p)

    vis = draw_matches_for_meshes(imgQ, imgT, Hs=Hs)
    cv2.imwrite(myfsys.getf_output(expt_names, 'meshes.png'), vis)

    visS = draw_matches_for_meshes(imgQ, imgT, Hs=Hs_stable)
    cv2.imwrite(myfsys.getf_output(expt_names, 'meshes_stable.png'), visS)

    viw = explore_match_for_meshes('affine find_obj', imgQ, imgT, kp_pairs_long_stable, Hs=Hs_stable)
    cv2.imwrite(myfsys.getf_output(expt_names, 'meshes_and_keypoints_stable.png'), viw)

    return vis, visS, viw
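The is_stable condition used in the loop above (a mesh is kept when at least 40% of its matches are RANSAC inliers) can also be packaged as a reusable predicate. The helper name is_stable_status and its min_inlier_ratio default are hypothetical, but the logic mirrors detect_and_match.

import numpy as np

def is_stable_status(status, min_inlier_ratio=0.4):
    # status is the per-match inlier mask returned with the homography;
    # an empty or missing mask means the mesh cannot be judged stable.
    if status is None or len(status) == 0:
        return False
    return np.sum(status) / len(status) >= min_inlier_ratio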
def get_matched_points(dumped_exdir, testset_name, fn, sdscQ, skpQ, dscT, kpT):
    try:
        with splta.Timer('Loading matching pickle'):
            mesh_pQ, mesh_pT, mesh_pairs = m_in.load_pickle_match_with_cross(
                dumped_exdir, testset_name, fn)
            # mesh_pQ, mesh_pT, mesh_pairs = splta_c.match_with_cross(matcher, splt_descQ, splt_kpQ, descT, kpT)
    except Exception:
        print('Failed to load matching result')
        with splta.Timer('matching'):
            mesh_pQ, mesh_pT, mesh_pairs = splta.match_with_cross(
                matcher, sdscQ, skpQ, dscT, kpT)
    return mesh_pQ, mesh_pT, mesh_pairs
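get_matched_points and the later snippets repeat a "load the cached pickle, otherwise recompute" pattern. A generic sketch of that fallback is given below; load_fn and compute_fn are placeholder callables and timer_cls stands in for splta.Timer, none of which are names from the original examples.

def load_or_compute(load_fn, compute_fn, label, timer_cls):
    # Prefer the cached result; fall back to recomputing on any failure.
    try:
        with timer_cls('Loading {} pickle'.format(label)):
            return load_fn()
    except Exception:
        print('Failed to load {} result, recomputing'.format(label))
        with timer_cls(label):
            return compute_fn()

With such a helper, get_matched_points reduces to a single call wrapping m_in.load_pickle_match_with_cross and splta.match_with_cross.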
def calculate_each_mesh(column_num, detector, input_fns, matcher, results, row_num, s_descQ, s_kpQ):
    imgT, kpT, descT = emod.detect(detector, input_fns)
    with Timer('matching'):
        mesh_pQ, mesh_pT, mesh_pairs = saf.match_with_cross(matcher, s_descQ, s_kpQ, descT, kpT)

    def f(pQ, pT, p):
        inlier_pairs, H, status = calclate_Homography(pQ, pT, p)
        if status is None:
            status = []
        return [len(inlier_pairs), len(status), len(p)]

    pairs_on_meshes = np.array(list(map(f, mesh_pQ, mesh_pT, mesh_pairs)))
    # pool = ThreadPool(processes=cv2.getNumberOfCPUs())
    # pairs_on_mesh_list = np.array(pool.imap(f, zip(range(len(mesh_pQ)), mesh_pQ, mesh_pT, mesh_pairs)))
    # pairs_on_mesh = pairs_on_mesh.reshape(row_num, column_num)
    results.append(pairs_on_meshes.reshape(row_num, column_num, 3))

    def clc(testcase_fn):
        logger.info('Test Case:{}'.format(testcase_fn))
        testcase_full_path = os.path.join(testset_full_path, testcase_fn)
        imgT, kpT, descT = emod.detect(detector, testcase_full_path)
        if len(kpT) == 0:
            return np.zeros((row_num, column_num, 3))
        with Timer('matching'):
            mesh_pQ, mesh_pT, mesh_pairs = saf.match_with_cross(matcher, s_descQ, s_kpQ, descT, kpT)

        def f(*pQpTp):
            inlier_pairs, H, status = calclate_Homography(pQpTp[0], pQpTp[1], pQpTp[2])
            if status is None:
                status = []
            return [len(inlier_pairs), len(status), len(pQpTp[2])]

        pairs_on_meshes = np.array(list(map(f, mesh_pQ, mesh_pT, mesh_pairs)))

        return pairs_on_meshes.reshape(row_num, column_num, 3)
    kpT, descT = splta.affine_detect(detector, imgT,
                                     pool=pool,
                                     simu_param='test')
    print('imgQ - %d features, imgT - %d features' %
          (splta.count_keypoints(splt_kpQ), len(kpT)))

    dumped_exdir = "expt_split_affinesim"
    # dumped_exdir = "expt_split_affinesim_conbine"
    try:
        with splta.Timer('Loading matching pickle'):
            mesh_pQ, mesh_pT, mesh_pairs = load_pickle_match_with_cross(
                dumped_exdir, testset_name, fn)
            # mesh_pQ, mesh_pT, mesh_pairs = splta.match_with_cross(matcher, splt_descQ, splt_kpQ, descT, kpT)
    except Exception:
        print('Failed to load matching result')
        with splta.Timer('matching'):
            mesh_pQ, mesh_pT, mesh_pairs = splta.match_with_cross(
                matcher, splt_descQ, splt_kpQ, descT, kpT)

    try:
        with splta.Timer('Loading estimation result'):
            Hs, statuses, pairs = load_pickle_calclate_Homography4splitmesh(
                dumped_exdir, testset_name, fn)
            # Hs, statuses, pairs = splta.calclate_Homography4splitmesh(mesh_pQ, mesh_pT, mesh_pairs, median=median)
    except Exception:
        print('Failed to load estimation result')
        with splta.Timer('estimation'):
            Hs, statuses, pairs = splta.calclate_Homography4splitmesh(
                mesh_pQ, mesh_pT, mesh_pairs, median=median)

    # Meshes where detection is not possible
    denied_mesh = list(is_detectable(len(match), median) for match in mesh_pQ)
    denied_num = len(denied_mesh) - sum(denied_mesh)
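is_detectable is called here but not defined in any of these snippets. Judging from its arguments (the number of cross-matches in a mesh and the median merged keypoint count), one plausible reading is a simple threshold test like the sketch below; the 0.5 factor is purely illustrative and this is an assumption, not the original implementation.

import numpy as np

def is_detectable(match_num, median):
    # Assumed predicate: a mesh counts as detectable when it holds at least
    # half the median number of matches; undetectable meshes return False,
    # which is what denied_num tallies above.
    if median is None or np.isnan(median):
        return False
    return match_num >= 0.5 * median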
Example #7
                                                           imgQ, simu_param='default')

    sk_num = count_keypoints(splt_kpQ)
    m_skQ, m_sdQ, m_k_num, merged_map = combine_mesh_compact(splt_kpQ, splt_descQ, temp_inf)
    if sk_num != count_keypoints(m_skQ) and count_keypoints(m_skQ) != np.sum(m_k_num):
        print('{0}, {1}, {2}'.format(sk_num, count_keypoints(m_skQ), np.sum(m_k_num)))
        sys.exit(1)
    median = np.nanmedian(m_k_num)
    list_merged_mesh_id = list(set(np.ravel(merged_map)))

    pool = ThreadPool(processes=cv2.getNumberOfCPUs())
    with Timer('Detection'):
        kpT, descT = affine_detect(detector, imgT, pool=pool, simu_param='test')

    with Timer('matching'):
        mesh_pQ, mesh_pT, mesh_pairs = match_with_cross(matcher, m_sdQ, m_skQ, descT, kpT)

    # Hs, statuses, pairs = calclate_Homography4splitmesh(mesh_pQ, mesh_pT, mesh_pairs)
    with Timer('estimation'):
        Hs, statuses, pairs = calclate_Homography4splitmesh(mesh_pQ, mesh_pT, mesh_pairs, median=median)

    vis = draw_matches_for_meshes(imgQ, imgT, temp_inf=temp_inf, Hs=Hs, list_merged_mesh_id=list_merged_mesh_id,
                                  merged_map=merged_map)
    cv2.imshow('view weak meshes', vis)
    cv2.imwrite('qrmarker_detection_merged.png', vis)
    cv2.waitKey()

    # viw = explore_match_for_meshes('affine find_obj', imgQ, imgT, pairs,
    #                                temp_inf=temp_inf, Hs=Hs,
    #                                list_merged_mesh_id=list_merged_mesh_id, merged_map=merged_map)
    #
    if detector is None:
        logger.info('unknown feature:{}'.format(feature_name))
        sys.exit(1)

    split_num = column_num * row_num
    img_q, splt_kp_q, splt_desc_q = split_asift_detect(detector, fn1,
                                                       split_num)

    logger.debug('using {}'.format(feature_name))

    img_t, kp_t, desc_t = emod.detect(detector, fn2)
    print('imgQ - %d features, imgT - %d features' %
          (saf.count_keypoints(splt_kp_q), len(kp_t)))

    with Timer('matching'):
        mesh_pQ, mesh_pT, mesh_pairs = saf.match_with_cross(
            matcher, splt_desc_q, splt_kp_q, desc_t, kp_t)

    list_H, statuses, kp_pairs, pairs_on_meshes = calculate_hompgraphy(
        mesh_pQ, mesh_pT, mesh_pairs)

    mt_p = pairs_on_meshes[:, :, 0]
    mt_s = pairs_on_meshes[:, :, 1]
    mt_m = pairs_on_meshes[:, :, 2]
    ratio_ps = mt_p / mt_s
    ratio_pm = mt_p / mt_m
    logger.info(mt_p)
    logger.info(mt_s)
    logger.info(mt_m)
    logger.info(ratio_ps)
    logger.info(ratio_pm)
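The element-wise ratios at the end of this example (mt_p / mt_s and mt_p / mt_m) raise NumPy RuntimeWarnings and produce inf/NaN whenever a mesh has zero matches. A guarded variant is sketched below; safe_ratio is a hypothetical helper, not part of the example.

import numpy as np

def safe_ratio(numer, denom):
    # Element-wise ratio that yields 0 where the denominator is 0 instead of
    # inf/NaN, so the logged matrices stay finite.
    numer = np.asarray(numer, dtype=float)
    denom = np.asarray(denom, dtype=float)
    return np.divide(numer, denom, out=np.zeros_like(numer), where=denom != 0)

ratio_ps = safe_ratio(mt_p, mt_s) and ratio_pm = safe_ratio(mt_p, mt_m) would then be safe to log directly.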
Example #9
def match(matcher, kpQ, descQ, kpT, descT, id):
    with Timer('matching', logger):
        mesh_pQ, mesh_pT, mesh_pairs = spltA.match_with_cross(matcher, descQ, kpQ, descT, kpT)
    return mesh_pQ, mesh_pT, mesh_pairs, id
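Because match returns the id it was handed, it is easy to fan out over several test images and reassociate the results afterwards. A minimal sketch with a ThreadPool follows; match_all and its tests argument (an iterable of (kpT, descT, id) tuples) are assumptions layered on top of the example, not part of it.

from multiprocessing.pool import ThreadPool

import cv2

def match_all(matcher, kpQ, descQ, tests):
    # tests: iterable of (kpT, descT, id) tuples, e.g. one per test image.
    pool = ThreadPool(processes=cv2.getNumberOfCPUs())
    jobs = [pool.apply_async(match, (matcher, kpQ, descQ, kpT, descT, i))
            for kpT, descT, i in tests]
    results = {}
    for job in jobs:
        mesh_pQ, mesh_pT, mesh_pairs, job_id = job.get()
        results[job_id] = (mesh_pQ, mesh_pT, mesh_pairs)
    return results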