def build_db(fn_pattern='.jpg'):
    """Copy every train/val Pascal3D+ image into the LMDB at `db_path`.

    Walks all `categories` over both the 'train' and 'val' collections,
    decodes each source image with OpenCV and stores it in a fresh
    `ImageData_lmdb` writer keyed by its imgID.

    Args:
        fn_pattern: image file extension, with or without a leading dot.

    Returns:
        The module-level `db_path` of the populated LMDB.
    """
    src_img_dir = os.path.join(Pascal3D_root, 'Images')

    # exist_ok replaces the old bare `except: pass`, which also hid real
    # OSErrors (e.g. permission problems) — those should fail loudly here.
    os.makedirs(db_path, exist_ok=True)
    imdb = ImageData_lmdb(db_path, 'w')

    allIDs = []
    ext = fn_pattern.strip('.')  # hoisted: loop-invariant
    for collection in ['train', 'val']:
        for cate in categories:
            _, rcobjs = get_anno(cate, collection=collection, filter='all',
                                 img_scale='Org', withCoarseVp=True)
            imgIDs = get_imgIDs(rcobjs)

            print('%15s  %s   %5d' % (cate, collection, len(imgIDs)))
            for imgID in tqdm(imgIDs):
                # ImageNet ids start with a synset prefix 'n...';
                # everything else comes from the Pascal VOC split.
                if imgID[0] == 'n':
                    fo = '%s_imagenet' % cate
                else:
                    fo = '%s_pascal' % cate
                image_file = os.path.join(src_img_dir, fo,
                                          '%s.%s' % (imgID, ext))
                assert os.path.exists(image_file), image_file

                img = cv2.imread(image_file)
                # cv2.imread signals failure by returning None, not by
                # raising — catch corrupt/unreadable files before storing.
                assert img is not None, 'Failed to decode: %s' % image_file
                imdb[imgID] = img
                allIDs.append(imgID)

    imdb.close()
    print('All Images: %d' % len(allIDs))
    print('All Images: %d  (unique)' % len(set(allIDs)))

    return db_path
def main(
        collection='train',
        filter='all',  # NOTE: shadows the builtin, kept for caller compat
        cates=categories,  # e.g. cates=['aeroplane','boat','car']
):
    """Compute the ground-truth box of every object in `cates` and pickle
    the resulting {obj_id: gt_box} mapping under `out_dir`.

    Args:
        collection: annotation split to read ('train' or 'val').
        filter: annotation filter forwarded to `get_anno`.
        cates: category names to process (defaults to all `categories`).
    """
    out_dir = '../anno_db_v2/data.cache/objId2gtbox'

    # exist_ok replaces the old bare `except:` that swallowed every error;
    # a genuine failure (e.g. permissions) now surfaces immediately instead
    # of crashing later at pickle.dump time.
    os.makedirs(out_dir, exist_ok=True)

    objId2gtbox = dict()
    nr_box = 0
    for cate in cates:
        print(' >>> %10s %5s  %20s    ' % (collection, filter, cate))
        # object IDs from get_anno are not needed here — only the records.
        _, rcobjs = get_anno(cate, collection=collection, filter=filter)

        for rcobj in rcobjs:
            objId2gtbox[rcobj.obj_id] = process(rcobj)
            nr_box += 1

    outpath = os.path.join(
        out_dir, 'cate%s_%s.%s.pkl' % (len(cates), collection, filter))
    pickle.dump(objId2gtbox, Open(outpath, 'wb'), protocol)
    print('[outpath]: ', outpath)
    print('nr_box:  ', nr_box)
# Exemple #3   (scraper artifact: `Exemple` was a bare undefined name — NameError at import)
# 0            (scraper artifact: stray literal)
def check(cates=categories):
    """Run `check_one` over every annotated object in both splits.

    Args:
        cates: category names to validate (defaults to all `categories`).
    """
    # Removed dead local `nr_box`: it was initialized but never updated.
    for cate in cates:
        print(cate)
        for collection in ['train', 'val']:
            # Object IDs are not needed for per-record validation.
            _, rcobjs = get_anno(cate, collection=collection)
            for rcobj in rcobjs:
                check_one(rcobj)
def eval_one(objID2aet_pred,
             cate='aeroplane',
             theta_levels=(pi / 6.,),  # tuple: never use a mutable default
             nr_worker=20):
    """Median geodesic error and Acc@theta for one category on val/easy.

    Compares predicted (azimuth, elevation, theta) viewpoints against the
    'easy'-filtered validation ground truth via rotation-matrix geodesic
    distance.

    Args:
        objID2aet_pred: mapping obj_id -> (a, e, t) predicted angles.
        cate: category name to evaluate.
        theta_levels: geodesic-distance thresholds in radians for accuracy.
            (Previously a list default — a mutable-default-argument hazard;
            it is only iterated, so a tuple is a drop-in replacement.)
        nr_worker: unused; kept for backward compatibility with callers of
            the commented-out multiprocessing path.

    Returns:
        (MedError, Acc_at_ts): median geodesic error in degrees, and one
        accuracy value per entry of `theta_levels`.
    """
    keys, rcobjs = get_anno(cate, collection='val', filter='easy')
    vps = rcobjs.gt_view
    gt_rot_Mats = compute_RotMats(vps.a, vps.e, vps.t)

    # Gather predictions in ground-truth record order.
    a_preds, e_preds, t_preds = [], [], []
    for rcobj in rcobjs:
        _a, _e, _t = objID2aet_pred[rcobj.obj_id]
        a_preds.append(_a)
        e_preds.append(_e)
        t_preds.append(_t)

    a_preds = np.array(a_preds, np.float32)
    e_preds = np.array(e_preds, np.float32)
    t_preds = np.array(t_preds, np.float32)
    pred_rot_Mats = compute_RotMats(a_preds, e_preds, t_preds)

    geo_dists = np.array([geodesic_dist_new(pr_R, gt_R)
                          for pr_R, gt_R in zip(pred_rot_Mats, gt_rot_Mats)])

    MedError = np.median(geo_dists) / pi * 180.
    # Fraction of objects whose error is below each threshold.
    Acc_at_ts = [
        sum(geo_dists < theta_level) / float(len(keys))
        for theta_level in theta_levels
    ]
    return MedError, Acc_at_ts