Example 1
    def __call__(self, results):
        image_size = results['ann_info']['image_size']

        img = results['img']
        joints_3d = results['joints_3d']
        joints_3d_visible = results['joints_3d_visible']
        c = results['center']
        s = results['scale']
        r = results['rotation']

        if self.use_udp:
            trans = get_warp_matrix(r, c * 2.0, image_size - 1.0, s * 200.0)
            img = cv2.warpAffine(
                img,
                trans, (int(image_size[0]), int(image_size[1])),
                flags=cv2.INTER_LINEAR)
            joints_3d[:, 0:2] = \
                warp_affine_joints(joints_3d[:, 0:2].copy(), trans)
        else:
            trans = get_affine_transform(c, s, r, image_size)
            img = cv2.warpAffine(
                img,
                trans, (int(image_size[0]), int(image_size[1])),
                flags=cv2.INTER_LINEAR)
            for i in range(results['ann_info']['num_joints']):
                if joints_3d_visible[i, 0] > 0.0:
                    joints_3d[i,
                              0:2] = affine_transform(joints_3d[i, 0:2], trans)

        results['img'] = img
        results['joints_3d'] = joints_3d
        results['joints_3d_visible'] = joints_3d_visible

        return results
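
The non-UDP branch above builds a single 2x3 affine matrix and reuses it for both the image and every visible keypoint. The self-contained sketch below illustrates that idea with plain OpenCV and NumPy; apply_point is an illustrative helper and is not mmpose's get_affine_transform or affine_transform.

# Illustrative sketch only: one affine matrix applied to both the image and a keypoint.
import cv2
import numpy as np

def apply_point(pt, trans):
    # Homogeneous form so the 2x3 matrix also applies the translation part.
    return trans @ np.array([pt[0], pt[1], 1.0])

img = np.zeros((100, 100, 3), dtype=np.uint8)
# Rotate 30 degrees around the image center at unit scale (stand-in for the real transform).
trans = cv2.getRotationMatrix2D((50, 50), 30, 1.0)

warped = cv2.warpAffine(img, trans, (100, 100), flags=cv2.INTER_LINEAR)
keypoint = apply_point((60.0, 50.0), trans)
print(warped.shape, keypoint)
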
Example 2
    def __call__(self, results):
        image_size = results['ann_info']['image_size']

        img = results['img']
        joints_3d = results['joints_3d']
        joints_3d_visible = results['joints_3d_visible']
        c = results['center']
        s = results['scale']
        r = results['rotation']
        trans = get_affine_transform(c, s, r, image_size)

        img = cv2.warpAffine(
            img,
            trans, (int(image_size[0]), int(image_size[1])),
            flags=cv2.INTER_LINEAR)

        for i in range(results['ann_info']['num_joints']):
            if joints_3d_visible[i, 0] > 0.0:
                joints_3d[i, 0:2] = affine_transform(joints_3d[i, 0:2], trans)

        results['img'] = img
        results['joints_3d'] = joints_3d
        results['joints_3d_visible'] = joints_3d_visible

        return results
Example 3
    def __call__(self, results):
        """Perform random affine transformation to joints coordinates."""

        c = results['center']
        s = results['scale'] / 200.0
        r = results['rotation']
        image_size = results['ann_info']['image_size']

        assert self.item in results
        joints = results[self.item]

        if self.visible_item is not None:
            assert self.visible_item in results
            joints_vis = results[self.visible_item]
        else:
            joints_vis = [np.ones_like(joints[0]) for _ in range(len(joints))]

        trans = get_affine_transform(c, s, r, image_size)
        nposes = len(joints)
        for n in range(nposes):
            for i in range(len(joints[0])):
                if joints_vis[n][i, 0] > 0.0:
                    joints[n][i,
                              0:2] = affine_transform(joints[n][i, 0:2], trans)
                    if (np.min(joints[n][i, :2]) < 0
                            or joints[n][i, 0] >= image_size[0]
                            or joints[n][i, 1] >= image_size[1]):
                        joints_vis[n][i, :] = 0

        results[self.item] = joints
        if self.visible_item is not None:
            results[self.visible_item] = joints_vis

        return results
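
Example 3 additionally zeroes the visibility of any joint that the affine transform maps outside the output frame. A minimal, self-contained sketch of that bounds check (clamp_visibility is an illustrative name, not mmpose code):

# Illustrative sketch of the out-of-frame visibility check above.
import numpy as np

def clamp_visibility(joints, joints_vis, image_size):
    w, h = image_size
    for i in range(len(joints)):
        x, y = joints[i, 0], joints[i, 1]
        if x < 0 or y < 0 or x >= w or y >= h:
            joints_vis[i, :] = 0
    return joints_vis

joints = np.array([[10.0, 20.0], [-5.0, 40.0]])
vis = np.ones((2, 3))
print(clamp_visibility(joints, vis, (64, 64)))
# The second joint lies outside [0, 64) x [0, 64), so its visibility row becomes 0.
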
Example 4
    def __call__(self, results):
        image_size = results['ann_info']['image_size']

        img = results['img']
        joints_2d = results['joints_2d']
        joints_2d_visible = results['joints_2d_visible']
        joints_3d = results['joints_3d']
        pose = results['pose']

        c = results['center']
        s = results['scale']
        r = results['rotation']
        trans = get_affine_transform(c, s, r, image_size)

        img = cv2.warpAffine(img,
                             trans, (int(image_size[0]), int(image_size[1])),
                             flags=cv2.INTER_LINEAR)

        for i in range(results['ann_info']['num_joints']):
            if joints_2d_visible[i, 0] > 0.0:
                joints_2d[i] = affine_transform(joints_2d[i], trans)

        joints_3d = _rotate_joints_3d(joints_3d, r)
        pose = _rotate_smpl_pose(pose, r)

        results['img'] = img
        results['joints_2d'] = joints_2d
        results['joints_2d_visible'] = joints_2d_visible
        results['joints_3d'] = joints_3d
        results['pose'] = pose

        if 'iuv' in results.keys():
            iuv = results['iuv']
            if iuv is not None:
                iuv_size = results['ann_info']['iuv_size']
                iuv = cv2.warpAffine(iuv,
                                     trans,
                                     (int(iuv_size[0]), int(iuv_size[1])),
                                     flags=cv2.INTER_NEAREST)
            results['iuv'] = iuv

        return results
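
For the 3D joints and SMPL pose, only an in-plane rotation is applied, since the crop's scaling and translation do not move the joints relative to the camera. The sketch below shows the in-plane rotation idea under that assumption; the sign convention and any extra handling in mmpose's _rotate_joints_3d may differ, and rotate_joints_3d_sketch is an illustrative name.

# Conceptual sketch only: rotate 3D joints about the camera z-axis by rot_deg degrees.
import numpy as np

def rotate_joints_3d_sketch(joints_3d, rot_deg):
    rad = np.deg2rad(rot_deg)
    rot_mat = np.array([
        [np.cos(rad), -np.sin(rad), 0.0],
        [np.sin(rad),  np.cos(rad), 0.0],
        [0.0,          0.0,         1.0],
    ])
    # z coordinates are unchanged by an in-plane rotation.
    return joints_3d @ rot_mat.T

joints = np.array([[1.0, 0.0, 2.0]])
print(rotate_joints_3d_sketch(joints, 90))  # approximately [[0., 1., 2.]]
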
Example 5
def _resize_align_multi_scale(image, input_size, current_scale, min_scale):
    """Resize the images for multi-scale training.

    Args:
        image: Input image
        input_size (int): Size of the image input
        current_scale (float): Current scale
        min_scale (float): Minimal scale

    Returns:
        image_resized (np.ndarray): resized image
        center (np.ndarray): center of image
        scale (np.ndarray): scale
    """
    size_resized, center, scale = _get_multi_scale_size(
        image, input_size, current_scale, min_scale)

    trans = get_affine_transform(center, scale, 0, size_resized)
    image_resized = cv2.warpAffine(image, trans, size_resized)

    return image_resized, center, scale
Example 6
    def __call__(self, results):

        image_size = results['ann_info']['image_size']

        img = results['img']
        joints_3d = results['joints_3d']
        joints_3d_visible = results['joints_3d_visible']
        c = results['center']
        s = results['scale']
        r = results['rotation']

        if self.use_udp:
            trans = get_warp_matrix(r, c * 2.0, image_size - 1.0, s * 200.0)
            img = cv2.warpAffine(
                img,
                trans, (int(image_size[0]), int(image_size[1])),
                flags=cv2.INTER_LINEAR)
            joints_3d[:, 0:2] = \
                warp_affine_joints(joints_3d[:, 0:2].copy(), trans)
        else:
            trans = get_affine_transform(c, s, r, image_size)
            img = cv2.warpAffine(
                img,
                trans, (int(image_size[0]), int(image_size[1])),
                flags=cv2.INTER_LINEAR)
            for i in range(results['ann_info']['num_joints']):
                if joints_3d_visible[i, 0] > 0.0:
                    joints_3d[i,
                              0:2] = affine_transform(joints_3d[i, 0:2], trans)

        results['img'] = img
        results['joints_3d'] = joints_3d
        results['joints_3d_visible'] = joints_3d_visible

        return results
Example 7
def _resize_align_multi_scale(image, input_size, current_scale, min_scale):
    """Resize the images for multi-scale training.

    Args:
        image: Input image
        input_size (np.ndarray[2]): Size (w, h) of the image input
        current_scale (float): Current scale
        min_scale (float): Minimal scale

    Returns:
        tuple: A tuple containing image info.

        - image_resized (np.ndarray): resized image
        - center (np.ndarray): center of image
        - scale (np.ndarray): scale
    """
    assert len(input_size) == 2
    size_resized, center, scale = _get_multi_scale_size(
        image, input_size, current_scale, min_scale)

    trans = get_affine_transform(center, scale, 0, size_resized)
    image_resized = cv2.warpAffine(image, trans, size_resized)

    return image_resized, center, scale
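
Both versions of _resize_align_multi_scale delegate the size computation to _get_multi_scale_size (not shown here) and then warp the image with a plain affine matrix. As a self-contained stand-in, the sketch below expresses an aspect-ratio-preserving resize as an affine warp; resize_keep_aspect and target_w are illustrative names, not part of mmpose.

# Illustrative sketch: aspect-ratio-preserving resize expressed as an affine warp.
import cv2
import numpy as np

def resize_keep_aspect(image, target_w):
    h, w = image.shape[:2]
    s = target_w / w
    target_h = int(round(h * s))
    # 2x3 affine matrix: pure scaling, no rotation or translation.
    trans = np.array([[s, 0.0, 0.0],
                      [0.0, s, 0.0]], dtype=np.float32)
    return cv2.warpAffine(image, trans, (target_w, target_h))

img = np.zeros((480, 640, 3), dtype=np.uint8)
print(resize_keep_aspect(img, 512).shape)  # (384, 512, 3)
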
Example 8
    def __call__(self, results):
        """Perform data augmentation with random scaling & rotating."""
        image, mask, joints = results['img'], results['mask'], results[
            'joints']

        self.input_size = results['ann_info']['image_size']
        if not isinstance(self.input_size, np.ndarray):
            self.input_size = np.array(self.input_size)
        if self.input_size.size > 1:
            assert len(self.input_size) == 2
        else:
            self.input_size = [self.input_size, self.input_size]
        self.output_size = results['ann_info']['heatmap_size']

        assert isinstance(mask, list)
        assert isinstance(joints, list)
        assert len(mask) == len(joints)
        assert len(mask) == len(self.output_size), (len(mask),
                                                    len(self.output_size),
                                                    self.output_size)

        height, width = image.shape[:2]
        if self.use_udp:
            center = np.array(((width - 1.0) / 2, (height - 1.0) / 2))
        else:
            center = np.array((width / 2, height / 2))

        img_scale = np.array([width, height], dtype=np.float32)
        aug_scale = np.random.random() * (self.max_scale - self.min_scale) \
            + self.min_scale
        img_scale *= aug_scale
        aug_rot = (np.random.random() * 2 - 1) * self.max_rotation

        if self.trans_factor > 0:
            dx = np.random.randint(-self.trans_factor * img_scale[0] / 200.0,
                                   self.trans_factor * img_scale[0] / 200.0)
            dy = np.random.randint(-self.trans_factor * img_scale[1] / 200.0,
                                   self.trans_factor * img_scale[1] / 200.0)

            center[0] += dx
            center[1] += dy
        if self.use_udp:
            for i, _output_size in enumerate(self.output_size):
                if not isinstance(_output_size, np.ndarray):
                    _output_size = np.array(_output_size)
                if _output_size.size > 1:
                    assert len(_output_size) == 2
                else:
                    _output_size = [_output_size, _output_size]

                scale = self._get_scale(img_scale, _output_size)

                trans = get_warp_matrix(
                    theta=aug_rot,
                    size_input=center * 2.0,
                    size_dst=np.array(
                        (_output_size[0], _output_size[1]), dtype=np.float32) -
                    1.0,
                    size_target=scale)
                mask[i] = cv2.warpAffine(
                    (mask[i] * 255).astype(np.uint8),
                    trans, (int(_output_size[0]), int(_output_size[1])),
                    flags=cv2.INTER_LINEAR) / 255
                mask[i] = (mask[i] > 0.5).astype(np.float32)
                joints[i][:, :, 0:2] = \
                    warp_affine_joints(joints[i][:, :, 0:2].copy(), trans)
                if results['ann_info']['scale_aware_sigma']:
                    joints[i][:, :, 3] = joints[i][:, :, 3] / aug_scale
            scale = self._get_scale(img_scale, self.input_size)
            mat_input = get_warp_matrix(
                theta=aug_rot,
                size_input=center * 2.0,
                size_dst=np.array((self.input_size[0], self.input_size[1]),
                                  dtype=np.float32) - 1.0,
                size_target=scale)
            image = cv2.warpAffine(
                image,
                mat_input, (int(self.input_size[0]), int(self.input_size[1])),
                flags=cv2.INTER_LINEAR)
        else:
            for i, _output_size in enumerate(self.output_size):
                if not isinstance(_output_size, np.ndarray):
                    _output_size = np.array(_output_size)
                if _output_size.size > 1:
                    assert len(_output_size) == 2
                else:
                    _output_size = [_output_size, _output_size]
                scale = self._get_scale(img_scale, _output_size)
                mat_output = get_affine_transform(center=center,
                                                  scale=scale / 200.0,
                                                  rot=aug_rot,
                                                  output_size=_output_size)
                mask[i] = cv2.warpAffine(
                    (mask[i] * 255).astype(np.uint8), mat_output,
                    (int(_output_size[0]), int(_output_size[1]))) / 255
                mask[i] = (mask[i] > 0.5).astype(np.float32)

                joints[i][:, :, 0:2] = \
                    warp_affine_joints(joints[i][:, :, 0:2], mat_output)
                if results['ann_info']['scale_aware_sigma']:
                    joints[i][:, :, 3] = joints[i][:, :, 3] / aug_scale

            scale = self._get_scale(img_scale, self.input_size)
            mat_input = get_affine_transform(center=center,
                                             scale=scale / 200.0,
                                             rot=aug_rot,
                                             output_size=self.input_size)
            image = cv2.warpAffine(
                image, mat_input,
                (int(self.input_size[0]), int(self.input_size[1])))

        results['img'], results['mask'], results[
            'joints'] = image, mask, joints

        return results
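
In the augmentation above, each mask is warped as an 8-bit image with linear interpolation and then re-binarized with a 0.5 threshold, which keeps the mask binary after the affine warp blurs its edges. A small self-contained illustration of that step (not mmpose code; the translation matrix is an arbitrary stand-in for the augmentation matrix):

# Illustrative sketch of the mask warp-and-rethreshold step.
import cv2
import numpy as np

mask = np.zeros((64, 64), dtype=np.float32)
mask[16:48, 16:48] = 1.0

# A small translation as a stand-in for the augmentation matrix.
trans = np.array([[1.0, 0.0, 5.5],
                  [0.0, 1.0, 3.5]], dtype=np.float32)

warped = cv2.warpAffine((mask * 255).astype(np.uint8), trans, (64, 64),
                        flags=cv2.INTER_LINEAR) / 255
binary = (warped > 0.5).astype(np.float32)
print(mask.sum(), binary.sum())  # the warped mask covers roughly the same area
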