Example #1
def test_heatmaps_to_coords():
    # Recovered coordinates should match the ground-truth keypoints within one pixel.
    coords, _ = heatmaps_to_coords(hm)
    assert torch.isclose(coords.squeeze().type(torch.int32), torch.from_numpy(kpts).type(torch.int32), atol=1).all()
    # All-zero heatmaps should yield all-zero coordinates.
    coords_zeros, _ = heatmaps_to_coords(hm_zeros)
    assert (coords_zeros == 0).all()

    # The same holds with an explicit confidence threshold.
    coords, _ = heatmaps_to_coords(hm, 0.5)
    assert torch.isclose(coords.squeeze().type(torch.int32), torch.from_numpy(kpts).type(torch.int32), atol=1).all()
    coords_zeros, _ = heatmaps_to_coords(hm_zeros, 0.5)
    assert (coords_zeros == 0).all()
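
# A minimal sketch of the module-level fixtures the test above relies on; the exact
# shapes and values used in the repository are assumptions, not taken from it.
import numpy as np
import torch

kpts = np.array([[12, 7], [3, 20]])          # (num_joints, 2) ground-truth (x, y) pixel coordinates
hm = torch.zeros(1, len(kpts), 32, 32)       # (B, num_joints, H, W) heatmap batch
for j, (x, y) in enumerate(kpts):
    hm[0, j, y, x] = 1.0                     # a single confident peak per joint
hm_zeros = torch.zeros_like(hm)              # all-zero heatmaps, i.e. no detections
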
Example #2
def eval_op():
    # Convert predicted heatmaps to (x, y) keypoint coordinates for evaluation.
    coords, _ = heatmaps_to_coords(predictions.clone(),
                                   thresh=self.config["hm"]["thresh"])
    return {
        "labels": {
            "coords": np.array(coords.cpu()),
            "kps": kwargs["kps"]
        }
    }
Example #3
        def log_op():

            PCK_THRESH = [0.01, 0.025, 0.05, 0.1, 0.125, 0.15, 0.175, 0.2, 0.25, 0.5]
            HM_THRESH = [0.01, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1]
            if self.config["pck_alpha"] not in PCK_THRESH:
                PCK_THRESH.append(self.config["pck_alpha"])

            coords, _ = heatmaps_to_coords(predictions.clone(), thresh=self.config["hm"]["thresh"])
            # PCK (percentage of correct keypoints) evaluated at every threshold in PCK_THRESH
            pck = {t: percentage_correct_keypoints(kwargs["kps"], coords, t, self.config["pck"]["type"])
                   for t in PCK_THRESH}

            gridded_outputs = np.expand_dims(sure_to_numpy(torchvision.utils.make_grid(
                predictions[0], nrow=10)), 0).transpose(1, 2, 3, 0)
            gridded_targets = np.expand_dims(sure_to_numpy(torchvision.utils.make_grid(
                sure_to_torch(kwargs["targets"])[0], nrow=10)), 0).transpose(1, 2, 3, 0)

            logs = {
                "images": {
                    # Image input not needed, because stick animal is printed on input image
                    # "image_input": adjust_support(torch2numpy(inputs).transpose(0, 2, 3, 1), "-1->1"),
                    "first_pred": adjust_support(gridded_outputs, "-1->1", "0->1"),
                    "first_targets": adjust_support(gridded_targets, "-1->1", "0->1"),
                    "outputs": adjust_support(heatmaps_to_image(torch2numpy(predictions)).transpose(0, 2, 3, 1),
                                              "-1->1", "0->1"),
                    "targets": adjust_support(heatmaps_to_image(kwargs["targets"]).transpose(0, 2, 3, 1), "-1->1"),
                    "inputs_with_stick": make_stickanimal(torch2numpy(inputs).transpose(0, 2, 3, 1), kwargs["kps"]),
                    "stickanimal": make_stickanimal(torch2numpy(inputs).transpose(0, 2, 3, 1), predictions.clone(),
                                                    thresh=self.config["hm"]["thresh"]),
                },
                "scalars": {
                    "loss": losses["total"],
                    "learning_rate": self.optimizer.state_dict()["param_groups"][0]["lr"],
                    f"PCK@{self.config['pck_alpha']}": np.around(pck[self.config['pck_alpha']][0], 5),
                },
                "figures": {
                    "Keypoint Mapping": plot_input_target_keypoints(torch2numpy(inputs).transpose(0, 2, 3, 1),
                                                                    # get BHWC
                                                                    torch2numpy(predictions),  # stay BCHW
                                                                    kwargs["kps"], coords),
                }
            }
            if self.config["losses"]["L2"]:
                logs["scalars"]["L2"] = losses["L2"]
            if self.config["losses"]["L1"]:
                logs["scalars"]["L1"] = losses["L1"]
            if self.config["losses"]["perceptual"]:
                logs["scalars"]["perceptual"] = losses["perceptual"]

            if self.config["pck"]["pck_multi"]:
                for key, val in pck.items():
                    # get mean value for pck at given threshold
                    logs["scalars"][f"PCK@{key}"] = np.around(val[0], 5)
                    for idx, part in enumerate(val[1]):
                        logs["scalars"][f"PCK@{key}_{self.dataset.get_idx_parts(idx)}"] = np.around(part, 5)
            return logs
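
# For reference, a minimal sketch of the PCK metric as it is consumed above:
# percentage_correct_keypoints is assumed to return (mean_pck, per_keypoint_pck),
# where a keypoint counts as correct if its prediction lies within
# alpha * reference_distance of the ground truth. This is an illustrative
# re-implementation, not the repository's code.
import numpy as np

def pck_sketch(gt, pred, alpha, ref_dist):
    """gt, pred: (B, num_joints, 2) arrays; ref_dist: (B,) per-image reference lengths."""
    dist = np.linalg.norm(gt - pred, axis=-1)        # (B, num_joints) pixel errors
    correct = dist <= alpha * ref_dist[:, None]      # threshold scaled per image
    per_keypoint = correct.mean(axis=0)              # accuracy per joint
    return per_keypoint.mean(), per_keypoint         # (mean_pck, per_keypoint_pck)
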
Example #4
def test_heatmaps_to_coords_thresholded():
    # Smoke test: the conversion should run without errors across a range of thresholds.
    predictions = np.load("AnimalPose/tests/predictions_000414.npy")
    thresholds = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
    for thresh in thresholds:
        coords, _ = heatmaps_to_coords(np.expand_dims(predictions, 0), thresh)
Example #5
def make_stickanimal(image, predictions, thresh=0, draw_all_circles=True):
    """
    Args:
        image: batch of images [B, W, H, C]
        joints: joint array
        predictions: batch of prediction heatmaps [B, Joints, W, H]

    Returns:

    """
    image = adjust_support(np.copy(image), "0->255").astype(np.uint8)
    if predictions.shape[-1] != 2:
        # Predictions to Keypoints
        coords, _ = heatmaps_to_coords(torch2numpy(predictions), thresh=thresh)
    else:
        coords = predictions

    joints = [
        # Head
        [2, 0],  # Nose - L_Eye
        [2, 1],  # Nose - R_Eye
        [0, 3],  # L_Eye - L_EarBase
        [1, 4],  # R_Eye - R_EarBase
        [2, 8],  # Nose - Throat
        # Body
        [8, 9],  # Throat - L_F_Elbow
        [8, 5],  # Throat - R_F_Elbow
        [9, 10],  # L_F_Elbow - Withers
        [5, 10],  # R_F_Elbow - Withers
        # Front
        [9, 16],  # L_F_Elbow - L_F_Knee
        [16, 6],  # L_F_Knee - L_F_Paw
        [5, 17],  # R_F_Elbow - R_F_Knee
        [17, 7],  # R_F_Knee - R_F_Paw
        # Back
        [14, 18],  # L_B_Elbow - L_B_Knee
        [18, 12],  # L_B_Knee - L_B_Paw
        [15, 19],  # R_B_Elbow - R_B_Knee
        [19, 13],  # R_B_Knee - R_B_Paw
        [10, 11],  # Withers - TailBase
        [11, 15],  # Tailbase - R_B_Elbow
        [11, 14],  # Tailbase - L_B_Elbow
    ]
    # Joint-group colors as channel tuples; the names below assume RGB images
    # (with BGR images, head and back would read as blue and red instead).
    head = (255, 0, 0)  # red
    body = (255, 255, 255)  # white
    front = (0, 255, 0)  # green
    back = (0, 0, 255)  # blue

    colordict = {
        0: head,
        1: head,
        2: head,
        3: head,
        4: head,
        5: body,
        6: body,
        7: body,
        8: body,
        9: front,
        10: front,
        11: front,
        12: front,
        13: back,
        14: back,
        15: back,
        16: back,
        17: back,
        18: back,
        19: back,
    }
    for idx, orig in enumerate(image):
        img = orig.copy()  # draw on a copy, then write it back into the batch below
        for idx_joints, pair in enumerate(joints):
            start = coords[idx][pair[0]]
            end = coords[idx][pair[1]]

            # skip this pair entirely when both endpoints are missing (reported as (0, 0))
            if np.isclose(start, [0, 0]).any() and np.isclose(end, [0, 0]).any():
                continue
            # draw a circle at every endpoint that was detected, if requested
            if not np.isclose(start, [0, 0]).any() and draw_all_circles:
                cv2.circle(img, (int(start[0]), int(start[1])), radius=1,
                           color=colordict[idx_joints], thickness=2, lineType=cv2.LINE_AA)
            if not np.isclose(end, [0, 0]).any() and draw_all_circles:
                cv2.circle(img, (int(end[0]), int(end[1])), radius=1,
                           color=colordict[idx_joints], thickness=2, lineType=cv2.LINE_AA)

            # draw the connecting line only when both endpoints were detected
            if not np.isclose(start[0], 0) and not np.isclose(end[0], 0):
                cv2.line(img, (int(start[0]), int(start[1])), (int(end[0]), int(end[1])),
                         color=colordict[idx_joints], thickness=1, lineType=cv2.LINE_AA)
        image[idx] = img

    return adjust_support(image, "-1->1")
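
# Illustrative usage of make_stickanimal; the shapes, dummy data, and threshold
# value are assumptions chosen only to make the example self-contained.
import numpy as np
import torch

batch_images = np.random.rand(2, 128, 128, 3)      # [B, H, W, C] images in [0, 1]
batch_heatmaps = torch.rand(2, 20, 128, 128)       # [B, Joints, H, W] predicted heatmaps
overlay = make_stickanimal(batch_images, batch_heatmaps, thresh=0.3)
print(overlay.shape)                               # same [B, H, W, C], support adjusted to [-1, 1]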