Example #1
    def forward(self, preds, *args):
        gt_joints = args[0]
        gt_joints_vis = args[1]

        # gt_joints_vis holds one flag per coordinate, so the joint count is
        # its width divided by 3 (x, y, z) or 2 (x, y).
        if self.output_3d:
            num_joints = int(gt_joints_vis.shape[1] / 3)
        else:
            num_joints = int(gt_joints_vis.shape[1] / 2)
        hm_width = preds.shape[-1]
        hm_height = preds.shape[-2]
        hm_depth = preds.shape[-3] // num_joints if self.output_3d else 1

        # integral operation: turn the heatmaps into joint coordinates
        # and per-joint confidence scores
        pred_jts, pred_scores = _integral_tensor(
            preds,
            num_joints,
            self.output_3d,
            hm_width,
            hm_height,
            hm_depth,
            integral_operation=self.integral_operation,
            norm_type=self.norm_type)

        _assert_no_grad(gt_joints)
        _assert_no_grad(gt_joints_vis)
        return weighted_l1_loss(pred_jts, pred_scores, gt_joints,
                                gt_joints_vis, self.size_average)
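For reference, here is a minimal sketch of the shape bookkeeping this forward() performs in the 2D case. The batch size, joint count, and heatmap resolution below are made-up illustrative values, not taken from the example:

import torch

# Illustrative shapes only: B images, K joints, H x W heatmaps (output_3d=False).
B, K, H, W = 4, 17, 64, 48

preds = torch.randn(B, K, H, W)        # heatmaps produced by the network
gt_joints = torch.rand(B, 2 * K)       # flattened (x, y) targets per joint
gt_joints_vis = torch.ones(B, 2 * K)   # one visibility flag per coordinate

# The same derivation performed inside forward() for the 2D branch:
num_joints = gt_joints_vis.shape[1] // 2   # -> K
hm_width, hm_height = preds.shape[-1], preds.shape[-2]
hm_depth = 1                               # no depth dimension in the 2D case

print(num_joints, hm_height, hm_width, hm_depth)   # 17 64 48 1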
Example #2
    def forward(self, preds, *args):
        gt_joints = args[0]
        gt_joints_vis = args[1]
        #print("pred:",preds.shape)
        #[48*10,133,64,48]
        #print("gt_joints:",gt_joints.shape)
        #[48*10,266]
        #print("gt_joints_vis:",gt_joints_vis.shape)
        #[48*10,266]
        # gt_joints_vis_body = torch.zeros_like(gt_joints_vis)
        # gt_joints_vis_hand = torch.zeros_like(gt_joints_vis)
        # gt_joints_vis_face = torch.zeros_like(gt_joints_vis)
        # gt_joints_vis_body[:, :46] = gt_joints_vis[:, :46]
        # gt_joints_vis_face[:, 46:-84] = gt_joints_vis[:, 46:-84]
        # gt_joints_vis_hand[:, -84:] = gt_joints_vis[:, -84:]

        # gt_joints_vis_cat = torch.cat((gt_joints_vis_body,gt_joints_vis_face,gt_joints_vis_hand),dim=1)
        # gt_joints_cat = torch.cat((gt_joints,gt_joints,gt_joints),dim=1)

        # gt_joints_vis[:,:23*2] = gt_joints_vis[:,:23*2]*2
        # gt_joints_vis[:,23*2:(-42*2)] = gt_joints_vis[:,23*2:(-42*2)] * 0.5

        if self.output_3d:
            num_joints = int(gt_joints_vis.shape[1] / 3)
        else:
            num_joints = int(gt_joints_vis.shape[1] / 2)
        hm_width = preds.shape[-1]
        hm_height = preds.shape[-2]
        hm_depth = preds.shape[-3] // num_joints if self.output_3d else 1

        pred_jts, pred_scores = _integral_tensor(
            preds,
            num_joints,
            self.output_3d,
            hm_width,
            hm_height,
            hm_depth,
            integral_operation=self.integral_operation,
            norm_type=self.norm_type)
        #print("pred_jts:",pred_jts.shape)
        #[48*10,266]
        #print("pred_scores:",pred_scores.shape)
        #[48*10,133,1]

        _assert_no_grad(gt_joints)
        _assert_no_grad(gt_joints_vis)
        return weighted_l1_loss(pred_jts, pred_scores, gt_joints,
                                gt_joints_vis, self.size_average)
Example #3
def validate_gt(m, opt, cfg, heatmap_to_coord, batch_size=20):
    gt_val_dataset = builder.build_dataset(cfg.DATASET.VAL, preset_cfg=cfg.DATA_PRESET, train=False)
    eval_joints = gt_val_dataset.EVAL_JOINTS
    test_branch = cfg.OTHERS.get('TEST_BRANCH', True)

    gt_val_loader = torch.utils.data.DataLoader(
        gt_val_dataset, batch_size=batch_size, shuffle=False, num_workers=20, drop_last=False)
    kpt_json = []
    kpt_json_branch = []
    m.eval()

    norm_type = cfg.LOSS.get('NORM_TYPE', None)
    hm_size = cfg.DATA_PRESET.HEATMAP_SIZE

    for inps, labels, label_masks, img_ids, bboxes in tqdm(gt_val_loader, dynamic_ncols=True):
        if isinstance(inps, list):
            inps = [inp.cuda() for inp in inps]
        else:
            inps = inps.cuda()
        output, feature = m(inps)

        pred = copy.deepcopy(output)
        assert pred.dim() == 4
        pred = pred[:, eval_joints, :, :]

        for i in range(output.shape[0]):
            bbox = bboxes[i][0].tolist()
            pose_coords, pose_scores = heatmap_to_coord(
                pred[i][gt_val_dataset.EVAL_JOINTS], bbox, hm_shape=hm_size, norm_type=norm_type)

            keypoints = np.concatenate((pose_coords, pose_scores), axis=1)
            keypoints = keypoints.reshape(-1).tolist()

            data = dict()
            data['bbox'] = bbox
            data['image_id'] = int(img_ids[i])
            data['score'] = float(np.mean(pose_scores) + np.max(pose_scores))
            data['category_id'] = 1
            data['keypoints'] = keypoints

            kpt_json.append(data)

        if test_branch:
            hm_height, hm_width = hm_size
            # stage 1: regress the whole-body joints (133 keypoints) from the heatmaps
            pred_jts, pred_score = _integral_tensor(
                pred, 133, False, hm_width, hm_height, 1,
                integral_operation=integral_op, norm_type='sigmoid')
            pred_jts = pred_jts.reshape(pred_jts.shape[0], 133, 2)

            # map the normalized joint coordinates to heatmap pixels
            coords_x = (pred_jts[:, :, 0] + 0.5) * hm_width
            coords_y = (pred_jts[:, :, 1] + 0.5) * hm_height

            # boxes around the hands (last 42 joints: 21 left, then 21 right) for RoI align
            lefthand_boxes = get_box_for_align(coords_x[:, -42:-21], coords_y[:, -42:-21])
            righthand_boxes = get_box_for_align(coords_x[:, -21:], coords_y[:, -21:])
            # stage 2: refine the hand keypoints on the cropped hand regions
            fine_out = m.forward_branch(output, feature, lefthand_boxes, righthand_boxes)
            # fine_out holds the refined, amplified hand keypoints; they still need
            # an affine transform back into the original heatmap frame
            fine_pred_jts, fine_pred_score = _integral_tensor(
                fine_out[:, -42:, :, :], 42, False, hm_width, hm_height, 1,
                integral_operation=integral_op, norm_type='sigmoid')
            fine_pred_jts = fine_pred_jts.reshape(fine_pred_jts.shape[0], 42, 2)

            lefthand_jts = fine_pred_jts[:, :21, :]
            righthand_jts = fine_pred_jts[:, 21:, :]
            lefthand_jts[:, :, 0] = (lefthand_jts[:, :, 0] + 0.5) * hm_width
            lefthand_jts[:, :, 1] = (lefthand_jts[:, :, 1] + 0.5) * hm_height
            righthand_jts[:, :, 0] = (righthand_jts[:, :, 0] + 0.5) * hm_width
            righthand_jts[:, :, 1] = (righthand_jts[:, :, 1] + 0.5) * hm_height

            center_hm = np.array([hm_width / 2.0, hm_height / 2.0])
            scale_hm = np.array([hm_size[1], hm_size[0]])

            lefthand_kpts = copy.deepcopy(lefthand_jts.cpu().numpy().astype(np.float32))
            righthand_kpts = copy.deepcopy(righthand_jts.cpu().numpy().astype(np.float32))
            # apply the affine transform to the left hand and add the box offset
            for j in range(lefthand_jts.shape[0]):
                box = lefthand_boxes[j].tolist()
                output_size = [box[2] - box[0], box[3] - box[1]]
                offset = np.array([box[0], box[1]])
                trans = get_affine_transform(center_hm, scale_hm, 0, output_size)
                for k in range(21):
                    lefthand_kpts[j, k, 0:2] = affine_transform(lefthand_kpts[j, k, 0:2], trans)

                lefthand_kpts[j, :, 0] += offset[0]
                lefthand_kpts[j, :, 1] += offset[1]
            #--------------------------------------------------
            # apply the affine transform to the right hand and add the box offset
            for j in range(righthand_jts.shape[0]):
                box = righthand_boxes[j].tolist()
                output_size = [box[2] - box[0], box[3] - box[1]]
                offset = np.array([box[0], box[1]])
                trans = get_affine_transform(center_hm, scale_hm, 0, output_size)
                for k in range(21):
                    righthand_kpts[j, k, 0:2] = affine_transform(righthand_kpts[j, k, 0:2], trans)

                righthand_kpts[j, :, 0] += offset[0]
                righthand_kpts[j, :, 1] += offset[1]
            #--------------------------------------------------

            # body and face joints are taken directly from the stage-1 regression
            bodyface_kpts = copy.deepcopy(pred_jts[:, :-42, :].cpu().numpy().astype(np.float32))
            bodyface_kpts[:, :, 0] = (bodyface_kpts[:, :, 0] + 0.5) * hm_width
            bodyface_kpts[:, :, 1] = (bodyface_kpts[:, :, 1] + 0.5) * hm_height

            fine_kpts = np.concatenate((bodyface_kpts, lefthand_kpts, righthand_kpts), axis=1)
            fine_score = np.concatenate((pred_score[:, :-42, :].cpu().numpy(), fine_pred_score.cpu().numpy()), axis=1)
            
            for n in range(output.shape[0]):
                bbox = bboxes[n][0].tolist()
                xmin, ymin, xmax, ymax = bbox
                w = xmax - xmin
                h = ymax - ymin
                center = np.array([xmin + w * 0.5, ymin + h * 0.5])
                scale = np.array([w, h])
                # map every keypoint from heatmap space back to the original image
                for k in range(fine_kpts.shape[1]):
                    fine_kpts[n, k, 0:2] = transform_preds(fine_kpts[n, k, 0:2], center, scale,
                                                           [hm_size[1], hm_size[0]])

                keypoints = np.concatenate((fine_kpts[n], fine_score[n]), axis=1)
                keypoints = keypoints.reshape(-1).tolist()

                data_branch = dict()
                data_branch['bbox'] = bbox
                data_branch['image_id'] = int(img_ids[n])
                data_branch['score'] = float(np.mean(fine_score[n]) + np.max(fine_score[n]))
                data_branch['category_id'] = 1
                data_branch['keypoints'] = keypoints
                kpt_json_branch.append(data_branch)

    with open(os.path.join(opt.work_dir, 'test_gt_kpt.json'), 'w') as fid:
        json.dump(kpt_json, fid)

    # the validation annotation file is hard-coded here; it could also be built
    # from os.path.join(cfg.DATASET.VAL.ROOT, cfg.DATASET.VAL.ANN)
    res = evaluate_mAP(
        os.path.join(opt.work_dir, 'test_gt_kpt.json'),
        ann_type='keypoints',
        ann_file='/ssd3/Benchmark/coco/annotations/coco_wholebody_val_133.json')

    if test_branch:
        with open(os.path.join(opt.work_dir, 'test_gt_kpt_2branch.json'), 'w') as fid2:
            json.dump(kpt_json_branch, fid2)
        res_branch = evaluate_mAP(
            os.path.join(opt.work_dir, 'test_gt_kpt_2branch.json'),
            ann_type='keypoints',
            ann_file='/ssd3/Benchmark/coco/annotations/coco_wholebody_val_133.json')
        
        return res, res_branch
    else:
        return res, 0
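The (x + 0.5) * width scaling that appears throughout validate_gt maps the integral regression output, which the shift suggests lives in a normalized range centered on zero, onto heatmap pixel coordinates. A small NumPy sketch of that mapping, with made-up values for one sample and two joints:

import numpy as np

# Illustrative values only: 64 x 48 heatmap, normalized joint coordinates
# roughly in [-0.5, 0.5) as the integral operation would emit them.
hm_height, hm_width = 64, 48
pred_jts = np.array([[[-0.25, 0.10],
                      [0.40, -0.30]]], dtype=np.float32)

# Same mapping used above before computing the hand boxes:
coords_x = (pred_jts[:, :, 0] + 0.5) * hm_width    # -> 12.0 and 43.2
coords_y = (pred_jts[:, :, 1] + 0.5) * hm_height   # -> 38.4 and 12.8
print(coords_x, coords_y)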