Example 1
def make_mpii():
    joint_names = 'rank,rkne,rhip,lhip,lkne,lank,pelv,thor,neck,head,rwri,relb,rsho,lsho,lelb,lwri'
    edges = 'lsho-lelb-lwri,rsho-relb-rwri,lhip-lkne-lank,rhip-rkne-rank,neck-head,pelv-thor'
    joint_info_full = JointInfo(joint_names, edges)

    joint_names_used = 'rank,rkne,rhip,lhip,lkne,lank,rwri,relb,rsho,lsho,lelb,lwri'
    # Only keep edges between joints that are present in the reduced joint set
    edges_used = 'lsho-lelb-lwri,rsho-relb-rwri,lhip-lkne-lank,rhip-rkne-rank'
    joint_info_used = JointInfo(joint_names_used, edges_used)
    dataset = Pose2DDataset(joint_info_used)
    selected_joints = [joint_info_full.ids[name] for name in joint_info_used.names]

    mat_path = f'{paths.DATA_ROOT}/mpii/mpii_human_pose_v1_u12_1.mat'
    s = matlabfile.load(mat_path).RELEASE
    annolist = np.atleast_1d(s.annolist)
    pool = util.BoundedPool(None, 120)

    for anno, is_train, rect_ids in zip(annolist, util.progressbar(s.img_train), s.single_person):
        if not is_train:
            continue

        image_path = f'mpii/images/{anno.image.name}'
        annorect = np.atleast_1d(anno.annorect)
        # MATLAB annotations are 1-based; convert to 0-based indices
        rect_ids = np.atleast_1d(rect_ids) - 1

        for rect_id in rect_ids:
            rect = annorect[rect_id]
            if 'annopoints' not in rect or len(rect.annopoints) == 0:
                continue

            # Joints that are not annotated for this person remain NaN
            coords = np.full(
                shape=[joint_info_full.n_joints, 2], fill_value=np.nan, dtype=np.float32)
            for joint in np.atleast_1d(rect.annopoints.point):
                coords[joint.id] = [joint.x, joint.y]

            coords = coords[selected_joints]
            rough_person_center = np.float32([rect.objpos.x, rect.objpos.y])
            # The MPII scale annotation is defined relative to a 200 px person height
            rough_person_size = rect.scale * 200

            # Shift person center down like [Sun et al. 2018], who say this is common on MPII
            rough_person_center[1] += 0.075 * rough_person_size

            topleft = np.array(rough_person_center) - np.array(rough_person_size) / 2
            bbox = np.array([topleft[0], topleft[1], rough_person_size, rough_person_size])
            ex = Pose2DExample(image_path, coords, bbox=bbox)
            new_im_path = image_path.replace('mpii', 'mpii_downscaled')
            without_ext, ext = os.path.splitext(new_im_path)
            new_im_path = f'{without_ext}_{rect_id:02d}{ext}'
            pool.apply_async(
                make_efficient_example, (ex, new_im_path), callback=dataset.examples[TRAIN].append)

    print('Waiting for tasks...')
    pool.close()
    pool.join()
    print('Done...')
    dataset.examples[TRAIN].sort(key=lambda x: x.image_path)
    return dataset
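
The helper classes used above (JointInfo, Pose2DDataset, Pose2DExample, ...) are not shown on this page. Below is a minimal sketch of a JointInfo-like container, reconstructed purely from how these examples use it (comma-separated name strings, dash-chained edge paths, the list/index-pair form from Example 3, and the .ids, .names and .n_joints attributes). The real class in the source repository may differ.

class JointInfo:
    """Minimal sketch of a joint metadata container, inferred from usage on this page."""

    def __init__(self, joints, edges=()):
        # Accept either a comma-separated name string or a list of names
        self.names = joints.split(',') if isinstance(joints, str) else list(joints)
        self.ids = {name: i for i, name in enumerate(self.names)}
        self.n_joints = len(self.names)

        # Accept either dash-chained edge paths ('lsho-lelb-lwri,...')
        # or explicit (i, j) index pairs
        self.edges = []
        if isinstance(edges, str):
            for path in edges.split(','):
                path_names = path.split('-')
                for name_a, name_b in zip(path_names, path_names[1:]):
                    if name_a in self.ids and name_b in self.ids:
                        self.edges.append((self.ids[name_a], self.ids[name_b]))
        else:
            self.edges = [tuple(e) for e in edges]

The examples only rely on the two constructor formats shown in Example 1 and Example 3, plus .ids, .names and .n_joints.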
Example 2
def make_coco_reduced(single_person=False, face=True):
    joint_names = 'rank,rkne,rhip,lhip,lkne,lank,rwri,relb,lelb,lwri'
    if face:
        joint_names += ',nose,leye,reye,lear,rear'

    edges = 'lelb-lwri,relb-rwri,lhip-lkne-lank,rhip-rkne-rank'
    joint_info = JointInfo(joint_names, edges)
    ds = data.joint_filtering.convert_dataset(make_coco(single_person), joint_info)

    body_joint_names = 'rank,rkne,rhip,lhip,lkne,lank,rwri,relb,lelb,lwri'.split(',')
    body_joint_ids = [joint_info.ids[name] for name in body_joint_names]

    def n_valid_body_joints(example):
        return np.count_nonzero(
            np.all(~np.isnan(example.coords[body_joint_ids]), axis=-1))

    # Keep only examples with more than 6 annotated (non-NaN) body joints
    ds.examples[TRAIN] = [ex for ex in ds.examples[TRAIN] if n_valid_body_joints(ex) > 6]
    return ds
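
The filter above keeps an example only when more than 6 of its body joints are annotated; unannotated joints carry NaN coordinates. A small self-contained illustration of this NaN-based validity check, with made-up coordinates:

import numpy as np

# Three joints: annotated, missing, annotated (made-up values)
coords = np.array([[10.0, 20.0],
                   [np.nan, np.nan],
                   [32.5, 48.0]], dtype=np.float32)

# A joint counts as valid only if both of its coordinates are non-NaN
valid_mask = np.all(~np.isnan(coords), axis=-1)
print(valid_mask, np.count_nonzero(valid_mask))  # [ True False  True] 2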
Example 3
def make_many():
    joint_names = [
        'lhip', 'rhip', 'bell', 'lkne', 'rkne', 'spin', 'lank', 'rank', 'thor',
        'ltoe', 'rtoe', 'neck', 'lcla', 'rcla', 'head', 'lsho', 'rsho', 'lelb',
        'relb', 'lwri', 'rwri', 'lhan', 'rhan', 'pelv', 'head_h36m',
        'head_muco', 'head_sailvos', 'htop_h36m', 'htop_muco', 'htop_sailvos',
        'lcla_muco', 'lear', 'leye', 'lfin_h36m', 'lfoo_h36m', 'lfoo_muco',
        'lhan_muco', 'lhip_cmu_panoptic', 'lhip_h36m', 'lhip_muco',
        'lhip_sailvos', 'lsho_cmu_panoptic', 'lsho_h36m', 'lsho_muco',
        'lsho_sailvos', 'lthu_h36m', 'neck_cmu_panoptic', 'neck_h36m',
        'neck_muco', 'neck_sailvos', 'nose', 'pelv_cmu_panoptic', 'pelv_h36m',
        'pelv_muco', 'pelv_sailvos', 'rcla_muco', 'rear', 'reye', 'rfin_h36m',
        'rfoo_h36m', 'rfoo_muco', 'rhan_muco', 'rhip_cmu_panoptic',
        'rhip_h36m', 'rhip_muco', 'rhip_sailvos', 'rsho_cmu_panoptic',
        'rsho_h36m', 'rsho_muco', 'rsho_sailvos', 'rthu_h36m', 'spi2_muco',
        'spi4_muco'
    ]
    edges = [(0, 3), (0, 23), (1, 4), (1, 23), (2, 5), (2, 23), (3, 6),
             (3, 37), (3, 38), (3, 39), (3, 40), (4, 7), (4, 62), (4, 63),
             (4, 64), (4, 65), (5, 8), (5, 47), (5, 49), (5, 52), (5, 53),
             (5, 54), (5, 71), (6, 9), (6, 34), (6, 35), (7, 10), (7, 59),
             (7, 60), (8, 11), (8, 71), (8, 72), (9, 34), (9, 35), (10, 59),
             (10, 60), (11, 12), (11, 13), (11, 14), (12, 15), (13, 16),
             (15, 17), (16, 18), (17, 19), (17, 41), (17, 42), (17, 43),
             (17, 44), (18, 20), (18, 66), (18, 67), (18, 68), (18, 69),
             (19, 21), (19, 33), (19, 36), (19, 45), (20, 22), (20, 58),
             (20, 61), (20, 70), (24, 27), (24, 47), (25, 28), (25, 48),
             (26, 29), (26, 49), (30, 43), (30, 48), (31, 32), (32, 50),
             (37, 51), (38, 52), (39, 53), (40, 54), (41, 46), (42, 47),
             (44, 49), (46, 50), (46, 51), (46, 66), (47, 67), (48, 55),
             (48, 72), (49, 69), (50, 57), (51, 62), (52, 63), (53, 64),
             (54, 65), (55, 68), (56, 57)]
    joint_info = JointInfo(joint_names, edges)
    import imageio
    import tempfile
    import cameralib
    # Write a blank placeholder image for the dummy example below
    _, image_path = tempfile.mkstemp(suffix='.jpg')
    imageio.imwrite(image_path, np.zeros((256, 256), dtype=np.uint8))
    dummy_example = Pose3DExample(
        image_path, np.zeros((joint_info.n_joints, 3), np.float32),
        [0, 0, 256, 256], cameralib.Camera())
    return Pose3DDataset(joint_info, [dummy_example], [dummy_example],
                         [dummy_example])
Example 4
def make_mpii_yolo():
    joint_info_full = JointInfo(
        'rank,rkne,rhip,lhip,lkne,lank,pelv,thor,neck,head,rwri,relb,rsho,lsho,lelb,lwri',
        'lsho-lelb-lwri,rsho-relb-rwri,lhip-lkne-lank,rhip-rkne-rank,neck-head,pelv-thor')
    joint_info_used = JointInfo(
        'rank,rkne,rhip,lhip,lkne,lank,rwri,relb,lelb,lwri',
        'lelb-lwri,relb-rwri,lhip-lkne-lank,rhip-rkne-rank')
    selected_joints = [joint_info_full.ids[name] for name in joint_info_used.names]

    mat_path = f'{paths.DATA_ROOT}/mpii/mpii_human_pose_v1_u12_1.mat'
    s = matlabfile.load(mat_path).RELEASE
    annolist = np.atleast_1d(s.annolist)
    all_boxes = util.load_pickle(f'{paths.DATA_ROOT}/mpii/yolov3_detections.pkl')

    examples = []
    with util.BoundedPool(None, 120) as pool:
        for anno_id, (anno, is_train) in enumerate(
                zip(annolist, util.progressbar(s.img_train))):
            if not is_train:
                continue

            image_path = f'{paths.DATA_ROOT}/mpii/images/{anno.image.name}'

            annorect = np.atleast_1d(anno.annorect)
            gt_people = []
            for rect_id, rect in enumerate(annorect):
                if 'annopoints' not in rect or len(rect.annopoints) == 0:
                    continue

                coords = np.full(
                    shape=[joint_info_full.n_joints, 2], fill_value=np.nan, dtype=np.float32)
                for joint in np.atleast_1d(rect.annopoints.point):
                    coords[joint.id] = [joint.x, joint.y]

                bbox = boxlib.expand(boxlib.bb_of_points(coords), 1.25)
                coords = coords[selected_joints]
                ex = Pose2DExample(image_path, coords, bbox=bbox)
                gt_people.append(ex)

            if not gt_people:
                continue

            # The detection pickle is keyed by the image path relative to the mpii directory;
            # keep only detections whose last element (the score) is above 0.5
            image_relpath = os.path.relpath(f'images/{anno.image.name}')
            boxes = [box for box in all_boxes[image_relpath] if box[-1] > 0.5]
            if not boxes:
                continue

            # Match ground-truth people to detections by maximizing total IoU
            # (Hungarian algorithm on the negated IoU matrix)
            iou_matrix = np.array([[boxlib.iou(gt_person.bbox, box[:4])
                                    for box in boxes]
                                   for gt_person in gt_people])
            gt_indices, box_indices = scipy.optimize.linear_sum_assignment(-iou_matrix)

            for i_gt, i_det in zip(gt_indices, box_indices):
                # Accept a match only if the boxes overlap at least a little
                if iou_matrix[i_gt, i_det] > 0.1:
                    ex = gt_people[i_gt]
                    ex.bbox = np.array(boxes[i_det][:4])
                    new_im_path = image_path.replace('mpii', 'mpii_downscaled_yolo')
                    without_ext, ext = os.path.splitext(new_im_path)
                    new_im_path = f'{without_ext}_{i_gt:02d}{ext}'
                    pool.apply_async(make_efficient_example, (ex, new_im_path),
                                     callback=examples.append)

    examples.sort(key=lambda ex: ex.image_path)

    def n_valid_joints(example):
        return np.count_nonzero(np.all(~np.isnan(example.coords), axis=-1))

    examples = [ex for ex in examples if n_valid_joints(ex) > 6]

    return Pose2DDataset(joint_info_used, examples)
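
make_mpii_yolo pairs ground-truth people with YOLO detections by running the Hungarian algorithm (scipy.optimize.linear_sum_assignment) on the negated IoU matrix and then discarding pairs with IoU below 0.1. Below is a self-contained sketch of that matching step, using made-up boxes in (x, y, w, h) format and a plain IoU helper standing in for boxlib.iou:

import numpy as np
import scipy.optimize

def iou(box_a, box_b):
    # Boxes are (x, y, w, h); returns intersection-over-union
    ax, ay, aw, ah = box_a
    bx, by, bw, bh = box_b
    ix1, iy1 = max(ax, bx), max(ay, by)
    ix2, iy2 = min(ax + aw, bx + bw), min(ay + ah, by + bh)
    inter = max(0.0, ix2 - ix1) * max(0.0, iy2 - iy1)
    union = aw * ah + bw * bh - inter
    return inter / union if union > 0 else 0.0

gt_boxes = [np.array([10, 10, 100, 200]), np.array([300, 50, 80, 160])]   # made-up
det_boxes = [np.array([295, 60, 90, 150]), np.array([12, 8, 105, 210])]   # made-up

iou_matrix = np.array([[iou(g, d) for d in det_boxes] for g in gt_boxes])
# The Hungarian algorithm maximizes total IoU when applied to the negated matrix
gt_idx, det_idx = scipy.optimize.linear_sum_assignment(-iou_matrix)
for i_gt, i_det in zip(gt_idx, det_idx):
    if iou_matrix[i_gt, i_det] > 0.1:  # discard poor matches, as in the example above
        print(f'GT person {i_gt} <- detection {i_det} (IoU {iou_matrix[i_gt, i_det]:.2f})')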
Example 5
def make_coco(single_person=True):
    joint_info = JointInfo(
        'nose,leye,reye,lear,rear,lsho,rsho,lelb,relb,lwri,rwri,lhip,rhip,lkne,rkne,lank,rank',
        'lsho-lelb-lwri,rsho-relb-rwri,lhip-lkne-lank,rhip-rkne-rank,lear-leye-nose-reye-rear')
    n_joints = joint_info.n_joints
    learning_phase_shortnames = {TRAIN: 'train', VALID: 'val', TEST: 'test'}
    # COCO keypoint visibility flags
    UNLABELED = 0
    OCCLUDED = 1
    VISIBLE = 2
    iou_threshold = 0.1 if single_person else 0.5

    suffix = '' if single_person else '_multi'
    examples_per_phase = {TRAIN: [], VALID: []}
    with util.BoundedPool(None, 120) as pool:
        for example_phase in (TRAIN, VALID):
            phase_shortname = learning_phase_shortnames[example_phase]
            coco_filepath = (
                f'{paths.DATA_ROOT}/coco/annotations/person_keypoints_{phase_shortname}2014.json')
            coco = pycocotools.coco.COCO(coco_filepath)

            impath_to_examples = {}
            for ann in coco.anns.values():
                filename = coco.imgs[ann['image_id']]['file_name']
                image_path = f'{paths.DATA_ROOT}/coco/{phase_shortname}2014/{filename}'

                joints = np.array(ann['keypoints']).reshape([-1, 3])
                visibilities = joints[:, 2]
                coords = joints[:, :2].astype(np.float32).copy()
                n_visible_joints = np.count_nonzero(visibilities == VISIBLE)
                n_occluded_joints = np.count_nonzero(visibilities == OCCLUDED)
                n_labeled_joints = n_occluded_joints + n_visible_joints

                # Keep the annotation only if at least a third of the joints are visible
                # and at least half are labeled at all; unlabeled joints become NaN
                if n_visible_joints >= n_joints / 3 and n_labeled_joints >= n_joints / 2:
                    coords[visibilities == UNLABELED] = np.nan
                    bbox_pt1 = np.array(ann['bbox'][0:2], np.float32)
                    bbox_wh = np.array(ann['bbox'][2:4], np.float32)
                    bbox = np.array([*bbox_pt1, *bbox_wh])
                    ex = Pose2DExample(image_path, coords, bbox=bbox)
                    impath_to_examples.setdefault(image_path, []).append(ex)

            n_images = len(impath_to_examples)
            for impath, examples in util.progressbar(impath_to_examples.items(), total=n_images):
                for i_example, example in enumerate(examples):
                    box = boxlib.expand(boxlib.bb_of_points(example.coords), 1.25)
                    # Skip people whose expanded bounding box is smaller than 200 px on its longer side
                    if np.max(box[2:]) < 200:
                        continue

                    if single_person:
                        # Keep a person only if no other person's expanded box overlaps
                        # theirs beyond the IoU threshold
                        other_boxes = [boxlib.expand(boxlib.bb_of_points(e.coords), 1.25)
                                       for e in examples if e is not example]
                        ious = np.array([boxlib.iou(b, box) for b in other_boxes])
                        usable = np.all(ious < iou_threshold)
                    else:
                        usable = True

                    if usable:
                        new_im_path = impath.replace('coco', 'coco_downscaled' + suffix)
                        without_ext, ext = os.path.splitext(new_im_path)
                        new_im_path = f'{without_ext}_{i_example:02d}{ext}'
                        pool.apply_async(
                            make_efficient_example, (example, new_im_path),
                            callback=examples_per_phase[example_phase].append)

    examples_per_phase[TRAIN].sort(key=lambda ex: ex.image_path)
    examples_per_phase[VALID].sort(key=lambda ex: ex.image_path)
    return Pose2DDataset(joint_info, examples_per_phase[TRAIN], examples_per_phase[VALID])
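
COCO stores each person's keypoints as a flat list of (x, y, v) triples, where v is the visibility flag used above (0 = unlabeled, 1 = labeled but occluded, 2 = labeled and visible). A short standalone sketch of the reshape-and-mask step from make_coco, with made-up keypoint values:

import numpy as np

UNLABELED, OCCLUDED, VISIBLE = 0, 1, 2

# Flat COCO-style keypoint list for 3 joints: (x, y, v) triples (made-up values)
keypoints = [120, 80, 2,   0, 0, 0,   150, 95, 1]

joints = np.array(keypoints).reshape([-1, 3])
visibilities = joints[:, 2]
coords = joints[:, :2].astype(np.float32).copy()

# Unlabeled joints get NaN coordinates so they are ignored downstream
coords[visibilities == UNLABELED] = np.nan

n_visible = np.count_nonzero(visibilities == VISIBLE)
n_labeled = n_visible + np.count_nonzero(visibilities == OCCLUDED)
print(coords, n_visible, n_labeled)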