Code Example #1
    def get_image(self, A_path, size, params, input_type):
        if input_type != 'openpose':
            A_img = Image.open(A_path).convert('RGB')
        else:
            # render a label image from the OpenPose keypoint file;
            # keypoints are randomly dropped only during training
            random_drop_prob = self.opt.random_drop_prob if self.opt.isTrain else 0
            A_img = Image.fromarray(
                read_keypoints(A_path, size, random_drop_prob,
                               self.opt.remove_face_labels,
                               self.opt.basic_point_only))

        if input_type == 'densepose' and self.opt.isTrain:
            # randomly zero out individual DensePose part regions (IDs 1-24,
            # stored in the third channel) during training
            A_np = np.array(A_img)
            part_labels = A_np[:, :, 2]
            for part_id in range(1, 25):
                if np.random.rand() < self.opt.random_drop_prob:
                    A_np[part_labels == part_id, :] = 0
            if self.opt.remove_face_labels:
                # part IDs 23 and 24 are the face/head labels
                A_np[(part_labels == 23) | (part_labels == 24), :] = 0
            A_img = Image.fromarray(A_np)

        is_img = input_type == 'img'
        # real images are resampled with bicubic; label maps must stay nearest-neighbor
        method = Image.BICUBIC if is_img else Image.NEAREST
        transform_scaleA = get_transform(self.opt, params, method=method)
        A_scaled = transform_scaleA(A_img)
        return A_scaled
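
The training-time label dropping in Example #1 can be read in isolation. Below is a minimal standalone sketch of that step; the helper name drop_densepose_parts and its flat argument list are illustrative, not part of the original dataset class:

import numpy as np
from PIL import Image

def drop_densepose_parts(A_img, drop_prob, remove_face_labels):
    """Randomly zero out DensePose part regions; part IDs live in channel 2."""
    A_np = np.array(A_img)
    part_labels = A_np[:, :, 2]
    for part_id in range(1, 25):
        if np.random.rand() < drop_prob:
            A_np[part_labels == part_id, :] = 0
    if remove_face_labels:
        # IDs 23 and 24 are the face/head parts removed by the original flag
        A_np[(part_labels == 23) | (part_labels == 24), :] = 0
    return Image.fromarray(A_np)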
Code Example #2
    def get_image(self,
                  A_path,
                  size,
                  params,
                  crop_coords,
                  input_type,
                  ppl_idx=None,
                  op=None,
                  ref_face_pts=None):
        if A_path is None:
            return None, None
        opt = self.opt
        is_img = input_type == 'img'
        # real images are resampled with bicubic; label maps must stay nearest-neighbor
        method = Image.BICUBIC if is_img else Image.NEAREST

        if input_type == 'openpose':
            # get image from openpose keypoints
            A_img, pose_pts, face_pts = read_keypoints(opt, A_path, size, 0,
                                                       False,
                                                       opt.basic_point_only,
                                                       opt.remove_face_labels,
                                                       ppl_idx, ref_face_pts)

            # randomly crop the image
            A_img, crop_coords = self.crop_person_region(
                A_img, crop_coords, pose_pts, size)

        else:
            A_img = self.read_data(A_path)
            A_img, _ = self.crop_person_region(A_img, crop_coords)
            if input_type == 'densepose':  # remove other ppl in the densepose map
                A_img = self.remove_other_ppl(A_img, A_path, crop_coords, op)

        # color augmentation is applied only to real images during training
        transform_scaleA = get_transform(opt,
                                         params,
                                         method=method,
                                         color_aug=is_img and opt.isTrain)
        A_scaled = transform_scaleA(A_img).float()

        if input_type == 'densepose':
            # renormalize the part-label channel: recover the stored part ID (0-24)
            # and spread it evenly over [-1, 1]
            A_scaled[2, :, :] = (
                (A_scaled[2, :, :] * 0.5 + 0.5) * 255 / 24 - 0.5) / 0.5

        if input_type == 'openpose':
            # openpose inputs also return the rendered label image, the crop
            # coordinates and the face keypoints for reuse by the caller
            return A_scaled, A_img, crop_coords, face_pts
        return A_scaled
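
The densepose renormalization in Example #2 undoes the image-style normalization on the part-label channel and respreads the part IDs over [-1, 1]. A quick standalone check of that arithmetic, assuming the channel was produced by ToTensor followed by Normalize with mean 0.5 and std 0.5:

import torch

# A stored part ID p in 0..24 becomes x = (p / 255 - 0.5) / 0.5 after normalization.
# The expression ((x * 0.5 + 0.5) * 255 / 24 - 0.5) / 0.5 then equals (p / 24 - 0.5) / 0.5,
# i.e. the 25 part IDs end up evenly spaced over [-1, 1].
p = torch.arange(0, 25, dtype=torch.float32)
x = (p / 255 - 0.5) / 0.5
renorm = ((x * 0.5 + 0.5) * 255 / 24 - 0.5) / 0.5
assert torch.allclose(renorm, (p / 24 - 0.5) / 0.5)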
Code Example #3
File: data_stage3.py  Project: CV-IP/C2F-FWN
    def get_SImage(self, spose_path, sparsing_path, sfg_path, simg_path, size,
                   params, BigSizeFlag):
        SI = Image.open(simg_path).convert('RGB')
        # source images are brought to a common resolution:
        # 1920x1080 when BigSizeFlag is set, otherwise 192x256
        if SI.size != (1920, 1080) and BigSizeFlag:
            SI = SI.resize((1920, 1080), Image.BICUBIC)
        elif not BigSizeFlag:
            SI = SI.resize((192, 256), Image.BICUBIC)
        # two copies of the image, masked below to the partial foreground (SFG_np)
        # and to the full foreground (SFG_full_np)
        SFG_np = np.array(SI)
        SFG_full_np = np.array(SI)

        random_drop_prob = self.opt.random_drop_prob if self.opt.isTrain else 0
        SPose_array, _ = read_keypoints(spose_path, size, random_drop_prob,
                                        self.opt.remove_face_labels,
                                        self.opt.basic_point_only)
        SPose = Image.fromarray(SPose_array)
        if SPose.size != (1920, 1080) and BigSizeFlag:
            SPose = SPose.resize((1920, 1080), Image.NEAREST)
        elif not BigSizeFlag:
            SPose = SPose.resize((192, 256), Image.NEAREST)
        SPose_np = np.array(SPose)

        SParsing = Image.open(sparsing_path)
        SParsing_size = SParsing.size
        if SParsing_size != (1920, 1080) and SParsing_size != (
                192, 256) and BigSizeFlag:
            SParsing = SParsing.resize((1920, 1080), Image.NEAREST)
        elif not BigSizeFlag and SParsing_size != (192, 256):
            SParsing = SParsing.resize((192, 256), Image.NEAREST)
        SParsing_np = np.array(SParsing)

        if SParsing_size == (192, 256):
            # 192x256 parsing maps are used as-is (treated as already merged)
            SParsing_new_np = SParsing_np
        else:
            # merge the original parsing labels into 12 coarse classes
            # (0 = background, 1-11 = merged body / clothing regions)
            SParsing_new_np = np.zeros_like(SParsing_np)
            SParsing_new_np[(SParsing_np == 3) | (SParsing_np == 5) |
                            (SParsing_np == 6) | (SParsing_np == 7) |
                            (SParsing_np == 11)] = 1
            SParsing_new_np[(SParsing_np == 8) | (SParsing_np == 9) |
                            (SParsing_np == 12)] = 2
            SParsing_new_np[(SParsing_np == 1) | (SParsing_np == 2)] = 3
            SParsing_new_np[(SParsing_np == 4) | (SParsing_np == 13)] = 4
            SParsing_new_np[(SParsing_np == 14)] = 5
            SParsing_new_np[(SParsing_np == 15)] = 6
            SParsing_new_np[(SParsing_np == 16)] = 7
            SParsing_new_np[(SParsing_np == 17)] = 8
            SParsing_new_np[(SParsing_np == 10)] = 9
            SParsing_new_np[(SParsing_np == 18)] = 10
            SParsing_new_np[(SParsing_np == 19)] = 11

        if BigSizeFlag:
            transform_scale = get_transform(self.opt,
                                            params,
                                            normalize=True,
                                            method=Image.NEAREST,
                                            color_aug=False)
        else:
            transform_scale = get_transform_fixed(self.opt,
                                                  params,
                                                  normalize=True,
                                                  method=Image.NEAREST,
                                                  color_aug=False)

        SPose_scaled = transform_scale(Image.fromarray(SPose_np))

        SParsing_new = Image.fromarray(SParsing_new_np)
        if SParsing_size != (192, 256) and BigSizeFlag:
            transform_scale = get_transform(self.opt,
                                            params,
                                            normalize=False,
                                            method=Image.NEAREST,
                                            color_aug=False)
        else:
            transform_scale = get_transform_fixed(self.opt,
                                                  params,
                                                  normalize=False,
                                                  method=Image.NEAREST,
                                                  color_aug=False)
        # recover integer label IDs after the [0, 1] tensor conversion
        SParsing_scaled = transform_scale(SParsing_new) * 255.0

        if BigSizeFlag:
            transform_scale = get_transform(self.opt,
                                            params,
                                            normalize=True,
                                            method=Image.BICUBIC,
                                            color_aug=self.opt.color_aug)
        else:
            transform_scale = get_transform_fixed(self.opt,
                                                  params,
                                                  normalize=True,
                                                  method=Image.BICUBIC,
                                                  color_aug=self.opt.color_aug)
        SI_scaled = transform_scale(SI)
        # full foreground: zero out everything labeled background
        SFG_full_np[SParsing_new_np == 0] = 0
        SFG_full_scaled = transform_scale(Image.fromarray(SFG_full_np))

        if SI.size != (192, 256) and BigSizeFlag:
            transform_scale = get_transform(self.opt,
                                            params,
                                            normalize=True,
                                            method=Image.BICUBIC,
                                            color_aug=self.opt.color_aug)
        else:
            transform_scale = get_transform_fixed(self.opt,
                                                  params,
                                                  normalize=True,
                                                  method=Image.BICUBIC,
                                                  color_aug=self.opt.color_aug)

        if SI.size != (192, 256):
            # partial foreground: keep only merged labels 1-3, zero everything else
            SFG_np[(SParsing_new_np != 1) & (SParsing_new_np != 2) &
                   (SParsing_new_np != 3)] = 0
        SFG_scaled = transform_scale(Image.fromarray(SFG_np))

        return SPose_scaled, SParsing_scaled, SFG_scaled, SFG_full_scaled, SI_scaled
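
The chained boolean masks that merge the parsing labels in Example #3 can equivalently be expressed as a lookup table. A minimal sketch of that alternative; the helper name merge_parsing_labels is illustrative, and the mapping copies the source-side merge above (which differs from the target-side order in Example #4):

import numpy as np

# source parsing label -> merged label; anything unlisted stays background (0)
SOURCE_MERGE = {3: 1, 5: 1, 6: 1, 7: 1, 11: 1,
                8: 2, 9: 2, 12: 2,
                1: 3, 2: 3,
                4: 4, 13: 4,
                14: 5, 15: 6, 16: 7, 17: 8,
                10: 9, 18: 10, 19: 11}

def merge_parsing_labels(parsing_np):
    """Apply the label merge to a uint8 parsing map via a 256-entry lookup table."""
    lut = np.zeros(256, dtype=parsing_np.dtype)
    for src, dst in SOURCE_MERGE.items():
        lut[src] = dst
    return lut[parsing_np]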
Code Example #4
    def get_TImage(self, tpose_path, tparsing_path, timg_path, size, params,
                   BigSizeFlag):
        # keypoints are only randomly dropped during training
        random_drop_prob = self.opt.random_drop_prob if self.opt.isTrain else 0
        TPose_array, translation = read_keypoints(tpose_path, size,
                                                  random_drop_prob,
                                                  self.opt.remove_face_labels,
                                                  self.opt.basic_point_only)
        TPose = Image.fromarray(TPose_array)
        if TPose.size != (1920, 1080) and BigSizeFlag:
            TPose = TPose.resize((1920, 1080), Image.NEAREST)
        elif not BigSizeFlag:
            TPose = TPose.resize((192, 256), Image.NEAREST)
        # copies of the pose map, masked below to the non-clothing and clothing regions
        TPose_uncloth_tmp_np = np.array(TPose)
        TPose_cloth_tmp_np = np.array(TPose)

        TI = Image.open(timg_path).convert('RGB')
        if TI.size != (1920, 1080) and BigSizeFlag:
            TI = TI.resize((1920, 1080), Image.BICUBIC)
        elif not BigSizeFlag:
            TI = TI.resize((192, 256), Image.BICUBIC)
        # copies of the target image, masked below to the full foreground,
        # non-clothing and clothing regions
        TFG_tmp_np = np.array(TI)
        TFG_uncloth_tmp_np = np.array(TI)
        TFG_cloth_tmp_np = np.array(TI)

        TParsing = Image.open(tparsing_path)
        TParsing_size = TParsing.size
        if TParsing_size != (1920, 1080) and TParsing_size != (
                192, 256) and BigSizeFlag:
            TParsing = TParsing.resize((1920, 1080), Image.NEAREST)
        elif not BigSizeFlag and TParsing_size != (192, 256):
            TParsing = TParsing.resize((192, 256), Image.NEAREST)
        TParsing_np = np.array(TParsing)

        # merge the original parsing labels into 12 coarse classes
        # (note: the class order differs from the source-side merge in Example #3)
        TParsing_new_np = np.zeros_like(TParsing_np)
        TParsing_new_np[(TParsing_np == 1) | (TParsing_np == 2)] = 1
        TParsing_new_np[(TParsing_np == 3) | (TParsing_np == 5) |
                        (TParsing_np == 6) | (TParsing_np == 7) |
                        (TParsing_np == 11)] = 2
        TParsing_new_np[(TParsing_np == 8) | (TParsing_np == 9) |
                        (TParsing_np == 12)] = 3
        TParsing_new_np[(TParsing_np == 4) | (TParsing_np == 13)] = 4
        TParsing_new_np[(TParsing_np == 14)] = 5
        TParsing_new_np[(TParsing_np == 15)] = 6
        TParsing_new_np[(TParsing_np == 16)] = 7
        TParsing_new_np[(TParsing_np == 17)] = 8
        TParsing_new_np[(TParsing_np == 10)] = 9
        TParsing_new_np[(TParsing_np == 18)] = 10
        TParsing_new_np[(TParsing_np == 19)] = 11

        # non-clothing ("uncloth") parsing: drop the clothing labels and renumber
        # the remaining regions 1-9
        TParsing_uncloth_np = np.zeros_like(TParsing_np)
        TParsing_uncloth_np[(TParsing_np == 1) | (TParsing_np == 2)] = 1
        TParsing_uncloth_np[(TParsing_np == 4) | (TParsing_np == 13)] = 2
        TParsing_uncloth_np[(TParsing_np == 14)] = 3
        TParsing_uncloth_np[(TParsing_np == 15)] = 4
        TParsing_uncloth_np[(TParsing_np == 16)] = 5
        TParsing_uncloth_np[(TParsing_np == 17)] = 6
        TParsing_uncloth_np[(TParsing_np == 10)] = 7
        TParsing_uncloth_np[(TParsing_np == 18)] = 8
        TParsing_uncloth_np[(TParsing_np == 19)] = 9

        # clothing parsing: the two clothing classes (merged labels 2 and 3)
        # are renumbered to 1 and 2, everything else stays background
        TParsing_cloth_np = np.zeros_like(TParsing_new_np)
        TParsing_cloth_np[(TParsing_new_np == 2) | (TParsing_new_np == 3)] = \
            TParsing_new_np[(TParsing_new_np == 2) | (TParsing_new_np == 3)] - 1

        TParsing_new = Image.fromarray(TParsing_new_np)
        TParsing_uncloth = Image.fromarray(TParsing_uncloth_np)
        TParsing_cloth = Image.fromarray(TParsing_cloth_np)
        if TParsing_size != (192, 256) and BigSizeFlag:
            transform_scale = get_transform(self.opt,
                                            params,
                                            normalize=False,
                                            method=Image.NEAREST)
        else:
            transform_scale = get_transform_fixed(self.opt,
                                                  params,
                                                  normalize=False,
                                                  method=Image.NEAREST)
        TParsing_uncloth_scaled = transform_scale(TParsing_uncloth) * 255.0
        TParsing_cloth_scaled = transform_scale(TParsing_cloth) * 255.0
        TParsing_scaled = transform_scale(TParsing_new) * 255.0

        if BigSizeFlag:
            transform_scale = get_transform(self.opt,
                                            params,
                                            normalize=True,
                                            method=Image.NEAREST)
        else:
            transform_scale = get_transform_fixed(self.opt,
                                                  params,
                                                  normalize=True,
                                                  method=Image.NEAREST)

        TPose_scaled = transform_scale(TPose)

        # zero the pose map outside the non-clothing region
        # (the 2-D mask broadcasts across all three channels)
        TPose_uncloth_tmp_np[TParsing_uncloth_np == 0] = 0
        TPose_uncloth_scaled = transform_scale(
            Image.fromarray(TPose_uncloth_tmp_np))

        # zero the pose map outside the clothing region
        TPose_cloth_tmp_np[TParsing_cloth_np == 0] = 0
        TPose_cloth_scaled = transform_scale(
            Image.fromarray(TPose_cloth_tmp_np))

        if BigSizeFlag:
            transform_scale = get_transform(self.opt,
                                            params,
                                            normalize=True,
                                            method=Image.BICUBIC)
        else:
            transform_scale = get_transform_fixed(self.opt,
                                                  params,
                                                  normalize=True,
                                                  method=Image.BICUBIC)

        # zero the target image outside the full foreground, the non-clothing
        # region and the clothing region respectively
        TFG_tmp_np[TParsing_new_np == 0] = 0
        TFG_scaled = transform_scale(Image.fromarray(TFG_tmp_np))

        TFG_uncloth_tmp_np[TParsing_uncloth_np == 0] = 0
        TFG_uncloth_scaled = transform_scale(
            Image.fromarray(TFG_uncloth_tmp_np))

        TFG_cloth_tmp_np[TParsing_cloth_np == 0] = 0
        TFG_cloth_scaled = transform_scale(Image.fromarray(TFG_cloth_tmp_np))

        return (TPose_scaled, TParsing_scaled, TFG_scaled,
                TPose_uncloth_scaled, TParsing_uncloth_scaled, TFG_uncloth_scaled,
                TPose_cloth_scaled, TParsing_cloth_scaled, TFG_cloth_scaled)
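
The background masking repeated in Example #4 (zeroing a pose map or RGB image wherever a parsing map is 0) can be factored into one small helper; a minimal sketch, with the name mask_background being illustrative:

import numpy as np
from PIL import Image

def mask_background(image, parsing_np):
    """Zero every pixel where the parsing label is background (0)."""
    arr = np.array(image)
    arr[parsing_np == 0] = 0   # the 2-D mask broadcasts across the RGB channels
    return Image.fromarray(arr)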