Example #1
def exp_gen_and_label_hoi_cand():
    args = parser.parse_args()
    not_specified_args = manage_required_args(
        args,
        parser,
        required_args=['subset'],
        optional_args=['gen_hoi_cand', 'label_hoi_cand'])
    if len(not_specified_args) > 0:
        return

    exp_name = 'hoi_candidates'
    exp_const = ExpConstants(exp_name=exp_name)
    exp_const.subset = args.subset

    data_const = HicoConstants()
    data_const.selected_dets_hdf5 = os.path.join(
        os.getcwd(),
        'data_symlinks/hico_exp/select_confident_boxes_in_hico/' + \
        'selected_coco_cls_dets.hdf5')

    if args.gen_hoi_cand:
        print('Generating HOI candidates from Faster-RCNN dets...')
        hoi_candidates.generate(exp_const, data_const)

    if args.label_hoi_cand:
        print('Labelling HOI candidates from Faster-RCNN dets...')
        data_const.hoi_cand_hdf5 = os.path.join(
            exp_const.exp_dir, f'hoi_candidates_{exp_const.subset}.hdf5')
        label_hoi_candidates.assign(exp_const, data_const)
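
The manage_required_args helper used above (and again in Examples #4 and #5) is defined elsewhere in the repository. A minimal sketch of what such a helper could look like, assuming it only reports which required flags were left unset and prints the parser help (hypothetical, not the repository's actual implementation):

def manage_required_args(args, parser, required_args=None, optional_args=None):
    # Hypothetical sketch: collect required argument names that were not
    # supplied on the command line (still None) and show the usage message so
    # the caller can abort. optional_args is accepted only for interface
    # compatibility with the calls above.
    required_args = required_args or []
    not_specified = [name for name in required_args
                     if getattr(args, name, None) is None]
    if not_specified:
        print(f'Please specify the following arguments: {not_specified}')
        parser.print_help()
    return not_specified
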
Example #2
def exp_gen_and_label_hoi_cand_hico():
    exp_name = 'hoi_candidates'
    exp_const = ExpConstants(exp_name=exp_name)
    data_const = HicoConstants()
    data_const.selected_dets_hdf5 = os.path.join(
        os.getcwd(),
        'data_symlinks/hico_exp/select_confident_boxes_in_hico/' + \
        'selected_coco_cls_dets.hdf5')
    _exp_gen_and_label_hoi_cand(exp_const, data_const, 'hico')
Example #3
def exp_detect_coco_objects_in_hico():
    exp_name = 'detect_coco_objects_in_hico'
    exp_const = ExpConstants(exp_name=exp_name)

    data_const = HicoConstants()

    prepare_data_for_faster_rcnn.prepare_hico(exp_const, data_const)
Example #4
def exp_cache_box_feats():
    args = parser.parse_args()

    not_specified_args = manage_required_args(args,
                                              parser,
                                              required_args=['subset'])
    if len(not_specified_args) > 0:
        return

    exp_name = 'hoi_candidates'
    exp_const = ExpConstants(exp_name=exp_name)
    exp_const.subset = args.subset

    data_const = HicoConstants()
    data_const.hoi_cand_hdf5 = os.path.join(
        exp_const.exp_dir, f'hoi_candidates_{exp_const.subset}.hdf5')

    cache_box_features.main(exp_const, data_const)
Example #5
def exp_assign_pose_to_human_cand():
    args = parser.parse_args()

    not_specified_args = manage_required_args(args,
                                              parser,
                                              required_args=['subset'])
    if len(not_specified_args) > 0:
        return

    exp_name = 'hoi_candidates'
    exp_const = ExpConstants(exp_name=exp_name)
    exp_const.subset = args.subset

    data_const = HicoConstants()
    data_const.hoi_cand_hdf5 = os.path.join(
        exp_const.exp_dir, f'hoi_candidates_{exp_const.subset}.hdf5')
    data_const.human_pose_dir = os.path.join(data_const.proc_dir, 'human_pose')
    data_const.num_keypoints = 18

    assign_pose_to_human_candidates.main(exp_const, data_const)
Example #6
def main():
    data_const = HicoConstants()
    anno_list = io.load_json_object(data_const.anno_list_json)
    global_ids = [anno['global_id'] for anno in anno_list]
    feats_hdf5 = os.path.join(data_const.proc_dir, 'faster_rcnn_fc7.hdf5')
    feats = h5py.File(feats_hdf5, 'w')
    for global_id in tqdm(global_ids):
        fc7_npy = os.path.join(data_const.faster_rcnn_boxes,
                               f'{global_id}_fc7.npy')
        fc7 = np.load(fc7_npy)
        feats.create_dataset(global_id, data=fc7)

    feats.close()
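
Once faster_rcnn_fc7.hdf5 has been written, the cached features can be read back per image. A minimal usage sketch (the dataset key shown is an illustrative HICO global_id):

import h5py

feats = h5py.File(feats_hdf5, 'r')              # file written by main() above
fc7 = feats['HICO_train2015_00000001'][()]      # per-box fc7 feature array for one image
feats.close()
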
Example #7
def main():
    args = parser.parse_args()

    data_const = HicoConstants()
    bin_to_hoi_ids = io.load_json_object(data_const.bin_to_hoi_ids_json)

    mAP_json = os.path.join(args.out_dir, 'mAP.json')
    APs = io.load_json_object(mAP_json)['AP']
    bin_map = {}
    for bin_id, hoi_ids in bin_to_hoi_ids.items():
        bin_map[bin_id] = compute_mAP(APs, hoi_ids)

    non_rare_hoi_ids = []
    for ul in bin_to_hoi_ids.keys():
        if ul == '10':
            continue
        non_rare_hoi_ids += bin_to_hoi_ids[ul]

    sample_complexity_analysis = {
        'bin': bin_map,
        'full': compute_mAP(APs, APs.keys()),
        'rare': bin_map['10'],
        'non_rare': compute_mAP(APs, non_rare_hoi_ids)
    }

    sample_complexity_analysis_json = os.path.join(
        args.out_dir,
        'sample_complexity_analysis.json')
    io.dump_json_object(
        sample_complexity_analysis,
        sample_complexity_analysis_json)


    bin_names = sorted([int(ul) for ul in bin_map.keys()])
    bin_names = [str(ul) for ul in bin_names]
    bin_headers = ['0'] + bin_names
    bin_headers = [
        bin_headers[i] + '-' + str(int(ul) - 1)
        for i, ul in enumerate(bin_headers[1:])
    ]
    headers = ['Full', 'Rare', 'Non-Rare'] + bin_headers

    sca = sample_complexity_analysis
    values = [sca['full'], sca['rare'], sca['non_rare']] + \
        [bin_map[name] for name in bin_names]
    values = [str(round(v * 100, 2)) for v in values]

    print('Space-delimited values that can be copied into a spreadsheet and split by space')
    print(' '.join(headers))
    print(' '.join(values))
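
compute_mAP is defined elsewhere; given how it is called here (a dict of per-HOI APs and a collection of HOI ids), a plausible sketch is simply the mean AP over the requested ids (hypothetical implementation):

import numpy as np

def compute_mAP(APs, hoi_ids):
    # Hypothetical sketch: average the AP values of the selected HOI classes,
    # skipping ids whose AP is missing.
    aps = [APs[hoi_id] for hoi_id in hoi_ids if APs.get(hoi_id) is not None]
    return float(np.mean(aps)) if aps else 0.0
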
Example #8
def main():
    data_const = HicoConstants()

    hico_list = io.load_json_object(data_const.anno_list_json)
    global_ids = [anno['global_id'] for anno in hico_list]

    # Create and save splits
    split_ids = split(global_ids, 0.2)

    split_ids_json = os.path.join(data_const.proc_dir, 'split_ids.json')
    io.dump_json_object(split_ids, split_ids_json)

    # Create and save split stats
    split_stats = {}
    for subset_name, subset_ids in split_ids.items():
        split_stats[subset_name] = len(subset_ids)
        print(f'{subset_name}: {len(subset_ids)}')

    split_stats_json = os.path.join(data_const.proc_dir,
                                    'split_ids_stats.json')
    io.dump_json_object(split_stats, split_stats_json)
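
The split helper is also defined outside this excerpt. A sketch of a random train/val split, assuming the second argument is the held-out fraction and the returned dict maps subset names to lists of global ids (the subset names are illustrative):

import random

def split(global_ids, val_frac, seed=0):
    # Hypothetical sketch: shuffle the ids and hold out val_frac of them for
    # validation, keeping the rest for training.
    ids = list(global_ids)
    random.Random(seed).shuffle(ids)
    num_val = int(val_frac * len(ids))
    return {'val': ids[:num_val], 'train': ids[num_val:]}
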
Example #9
def exp_select_and_evaluate_confident_boxes_in_hico():
    exp_name = 'select_confident_boxes_in_hico'
    exp_const = ExpConstants(exp_name=exp_name)
    exp_const.background_score_thresh = 0.01
    exp_const.max_humans = 10
    exp_const.max_objects_per_class = 10
    exp_const.max_background = 10
    exp_const.iou_thresh = 0.5

    data_const = HicoConstants()

    human_score_thresholds = [0.01]  # [0.01,0.05,0.1,0.5]
    object_score_thresholds = [0.01]  # [0.01,0.05,0.1,0.5]

    for human_score_thresh in human_score_thresholds:
        for object_score_thresh in object_score_thresholds:
            exp_const.human_score_thresh = human_score_thresh
            exp_const.object_score_thresh = object_score_thresh

            # select_confident_boxes.select(exp_const,data_const)
            evaluate_boxes.evaluate_boxes(exp_const, data_const)
            evaluate_boxes.evaluate_boxes_and_labels(exp_const, data_const)
Example #10
def main():
    data_const = HicoConstants()
    anno_list = io.load_json_object(data_const.anno_list_json)
    hoi_cls_count = {}
    for anno in tqdm(anno_list):
        if 'test' in anno['global_id']:
            continue

        for hoi in anno['hois']:
            hoi_id = hoi['id']
            if hoi_id not in hoi_cls_count:
                hoi_cls_count[hoi_id] = 0
            hoi_cls_count[hoi_id] += len(hoi['connections'])

    upper_limits = [10, 50, 100, 500, 1000, 10000]
    bin_to_hoi_ids = bin_hoi_ids(hoi_cls_count, upper_limits)

    hoi_cls_count_json = os.path.join(data_const.proc_dir,
                                      'hoi_cls_count.json')
    io.dump_json_object(hoi_cls_count, hoi_cls_count_json)

    bin_to_hoi_ids_json = os.path.join(data_const.proc_dir,
                                       'bin_to_hoi_ids.json')
    io.dump_json_object(bin_to_hoi_ids, bin_to_hoi_ids_json)
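
bin_hoi_ids is not shown here; from its usage, it groups HOI ids by training-instance count into bins keyed by the upper limit, so bin '10' holds the rare classes with fewer than 10 samples (as used in Example #7). A sketch under that assumption:

def bin_hoi_ids(hoi_cls_count, upper_limits):
    # Hypothetical sketch: bin '10' collects HOI ids with fewer than 10
    # training instances, bin '50' those with 10-49, and so on.
    bins = {str(ul): [] for ul in upper_limits}
    for hoi_id, count in hoi_cls_count.items():
        for i, ul in enumerate(upper_limits):
            lower = upper_limits[i - 1] if i > 0 else 0
            if lower <= count < ul:
                bins[str(ul)].append(hoi_id)
                break
    return bins
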
Example #11
def exp_detect_coco_objects_in_hico():
    exp_const = ExpConstants(exp_name='detect_coco_objects_in_hico')
    data_const = HicoConstants()
    _exp_detect_coco_objects(exp_const, data_const)
Example #12
def exp_cache_pose_feats_hico():
    exp_name = 'hoi_candidates'
    exp_const = ExpConstants(exp_name=exp_name)
    data_const = HicoConstants()
    _exp_cache_pose_feats(exp_const, data_const)
Example #13
def exp_assign_pose_to_human_cand_hico():
    exp_name = 'hoi_candidates'
    exp_const = ExpConstants(exp_name=exp_name)
    data_const = HicoConstants()
    _exp_assign_pose_to_human_cand(exp_const, data_const)
Example #14
            # (Excerpt) Inside the loop over detected human candidates for a
            # single global_id; keypts[i] holds the pose assigned to the i-th
            # human box.
            rpn_id = human_rpn_ids[i]
            if rpn_id in seen_rpn_ids:
                continue
            seen_rpn_ids.add(rpn_id)

            img = bbox_utils.vis_human_keypts(img, keypts[i], modify=True)

            img_out_path = os.path.join(
                exp_const.exp_dir,
                f'{global_id}.png')
            skio.imsave(img_out_path, img)


if __name__ == '__main__':
    exp_const = ExpConstants(exp_name='vis_human_pose')
    exp_const.max_count = 100

    data_const = HicoConstants()
    hoi_cand_dir = os.path.join(
        os.getcwd(),
        'data_symlinks/hico_exp/hoi_candidates')
    data_const.human_pose_feats_h5py = os.path.join(
        hoi_cand_dir,
        'human_pose_feats_test.hdf5')
    data_const.hoi_cand_h5py = os.path.join(
        hoi_cand_dir,
        'hoi_candidates_test.hdf5')
    data_const.num_keypts = 18

    main(exp_const, data_const)
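
bbox_utils.vis_human_keypts is defined elsewhere in the repository. A minimal sketch of a keypoint-overlay helper, assuming keypts is an (18, 3) array of (x, y, confidence) rows (hypothetical, for illustration only):

from skimage.draw import disk

def vis_human_keypts(img, keypts, modify=False, conf_thresh=0.1, radius=3):
    # Hypothetical sketch: draw a small red disk at every confidently
    # detected keypoint; when modify=False, draw on a copy of the image.
    out = img if modify else img.copy()
    for x, y, conf in keypts:
        if conf < conf_thresh:
            continue
        rr, cc = disk((int(y), int(x)), radius, shape=out.shape[:2])
        out[rr, cc] = (255, 0, 0)
    return out
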
Example #15
        return anno_list

    def convert(self):
        print('Creating anno list ...')
        anno_list = self.create_anno_list()
        io.dump_json_object(anno_list, self.const.anno_list_json)

        print('Creating hoi list ...')
        hoi_list = self.create_hoi_list()
        io.dump_json_object(hoi_list, self.const.hoi_list_json)

        print('Creating object list ...')
        object_list = sorted(list(set([hoi['object'] for hoi in hoi_list])))
        for i, obj in enumerate(object_list):
            object_list[i] = {'id': str(i + 1).zfill(3), 'name': obj}

        io.dump_json_object(object_list, self.const.object_list_json)

        print('Creating verb list ...')
        verb_list = sorted(list(set([hoi['verb'] for hoi in hoi_list])))
        for i, verb in enumerate(verb_list):
            verb_list[i] = {'id': str(i + 1).zfill(3), 'name': verb}

        io.dump_json_object(verb_list, self.const.verb_list_json)


if __name__ == '__main__':
    hico_const = HicoConstants()
    converter = ConvertMat2Json(hico_const)
    converter.convert()
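
For reference, the object and verb lists written by convert() are plain id/name records, numbered alphabetically with zero-padded ids. An illustration of the resulting JSON content (the names shown are only examples):

# object_list.json / verb_list.json entries have this shape:
object_list = [
    {'id': '001', 'name': 'airplane'},
    {'id': '002', 'name': 'apple'},
]
verb_list = [
    {'id': '001', 'name': 'adjust'},
    {'id': '002', 'name': 'assemble'},
]
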
Example #16
def main():
    exp_name = 'factors_101_glove_generalize3_FC2_MTLv1_MoE_distillation2_600'
    exp_dir = os.path.join(
        os.getcwd(),
        f'data_symlinks/hico_exp/hoi_classifier/{exp_name}')
    
    map_json = os.path.join(
        exp_dir,
        'mAP_eval/test_65000/mAP.json')
    
    hoi_aps = io.load_json_object(map_json)['AP']
    
    data_const = HicoConstants()
    hoi_list = io.load_json_object(data_const.hoi_list_json)
    
    obj_to_hoi_id = {}
    for hoi in hoi_list:
        hoi_id = hoi['id']
        obj = hoi['object']
        if obj not in obj_to_hoi_id:
            obj_to_hoi_id[obj] = []   
        obj_to_hoi_id[obj].append(hoi_id)

    obj_aps = []
    for obj in obj_to_hoi_id.keys():
        obj_aps.append((obj, COCO_CLS_TO_FASTER_RCNN_AP[obj]))

    obj_aps = sorted(obj_aps, key=lambda x: x[1])

    per_obj_hoi_aps = []
    for obj, obj_ap in obj_aps:
        obj_interaction_aps = []
        for hoi_id in obj_to_hoi_id[obj]:
            obj_interaction_aps.append(hoi_aps[hoi_id] * 100)

        per_obj_hoi_aps.append((obj, obj_interaction_aps))

    per_obj_hoi_aps = sorted(per_obj_hoi_aps, key=lambda x: np.median(x[1]))

    N = len(per_obj_hoi_aps)
    c = ['hsl('+str(h)+',50%'+',50%)' for h in np.linspace(0, 360, N)]
    data = []
    obj_aps_x = []
    obj_aps_y = []
    for i, (obj, aps) in enumerate(per_obj_hoi_aps):
        trace = go.Box(
            y=aps,
            name=" ".join(obj.split("_")),
            boxpoints=False,  # "outliers"
            marker={'color': c[i]},
            line={'width': 1}
        )
        data.append(trace)
        obj_aps_x.append(" ".join(obj.split("_")))
        obj_aps_y.append(COCO_CLS_TO_FASTER_RCNN_AP[obj] * 100)

    # Optional overlay of the object detector AP line (currently not added to
    # the plot).
    line_char_trace = go.Scatter(
        x=obj_aps_x,
        y=obj_aps_y,
        mode='lines+markers',
        line=dict(
            color='rgba(150, 150, 200, 1)',
            width=1,
            # dash='dash'
        ),
        marker=dict(size=4))
    # data.append(line_char_trace)

    layout = go.Layout(
        yaxis=dict(
            title='AP of HOI Categories',
            range=[0,100],
        ),
        xaxis=dict(
            title='Objects',
            tickangle=45,
            tickfont=dict(
                size=12,
            ),
        ),
        height=500,
        margin=go.Margin(
            l=100,
            r=100,
            b=150,
            t=50,
        )
    )

    filename = os.path.join(exp_dir, 'vis/interaction_aps_per_object.html')
    plotly.offline.plot(
        {'data': data, 'layout': layout},
        filename=filename,
        auto_open=False)

    corr_x = []
    corr_y = []
    for obj, aps in per_obj_hoi_aps:
        obj_ap = COCO_CLS_TO_FASTER_RCNN_AP[obj] * 100
        for hoi_ap in aps:
            corr_x.append(obj_ap)
            corr_y.append(hoi_ap)
    
    corr_trace = go.Scatter(
        x=corr_x,
        y=corr_y,
        mode='markers',
        marker=dict(
            size=8,
            color='rgba(255, 182, 193, .8)',
            line=dict(
                width=2,
                color='rgba(100, 0, 0, 1)'
            )
        )
    )

    corr_layout = go.Layout(
        yaxis=dict(
            title='AP of HOI Categories',
            range=[0,100],
        ),
        xaxis=dict(
            title='AP of Object Categories',
            range=[0,100],
        ),
        height=800,
        width=800,
        margin=go.Margin(
            l=100,
            r=100,
            b=150,
            t=50,
        )
    )

    filename = os.path.join(exp_dir, 'vis/hoi_ap_vs_obj_ap.html')
    plotly.offline.plot(
        {'data': [corr_trace], 'layout': corr_layout},
        filename=filename,
        auto_open=False)
Example #17
def exp_select_and_evaluate_confident_boxes_in_hico():
    exp_name = 'select_confident_boxes_in_hico'
    exp_const = ExpConstants(exp_name=exp_name)
    data_const = HicoConstants()
    _exp_select_and_evaluate_confident_boxes(exp_const, data_const)
Example #18
def main():
    exp_name = 'factors_rcnn_det_prob_appearance_boxes_and_object_label_human_pose'
    exp_dir = os.path.join(
        os.getcwd(), f'data_symlinks/hico_exp/hoi_classifier/{exp_name}')

    map_json = os.path.join(exp_dir, 'mAP_eval/test_30000/mAP.json')

    hoi_aps = io.load_json_object(map_json)['AP']

    data_const = HicoConstants()
    hoi_list = io.load_json_object(data_const.hoi_list_json)

    verb_to_hoi_id = {}
    for hoi in hoi_list:
        hoi_id = hoi['id']
        verb = hoi['verb']
        if verb not in verb_to_hoi_id:
            verb_to_hoi_id[verb] = []
        verb_to_hoi_id[verb].append(hoi_id)

    per_verb_hoi_aps = []
    for verb, hoi_ids in verb_to_hoi_id.items():
        verb_obj_aps = []
        for hoi_id in hoi_ids:
            verb_obj_aps.append(hoi_aps[hoi_id] * 100)

        per_verb_hoi_aps.append((verb, verb_obj_aps))

    per_verb_hoi_aps = sorted(per_verb_hoi_aps, key=lambda x: np.median(x[1]))

    N = len(per_verb_hoi_aps)
    c = ['hsl(' + str(h) + ',50%' + ',50%)' for h in np.linspace(0, 360, N)]
    data = []
    for i, (verb, aps) in enumerate(per_verb_hoi_aps):
        trace = go.Box(
            y=aps,
            name=" ".join(verb.split("_")),
            boxpoints=False,  #"outliers"
            marker={'color': c[i]},
            line={'width': 1})
        data.append(trace)

    layout = go.Layout(
        yaxis=dict(
            title='AP of HOI Categories',
            range=[0, 100],
        ),
        xaxis=dict(
            title='Interactions',
            tickangle=45,
            tickfont=dict(size=8, ),
        ),
        height=500,
        margin=go.Margin(
            l=100,
            r=100,
            b=150,
            t=50,
        ),
    )

    filename = os.path.join(exp_dir, 'vis/obj_aps_per_interaction.html')
    plotly.offline.plot(
        {'data': data, 'layout': layout},
        filename=filename,
        auto_open=False)