Example #1
    def compute_cord_warp_batch(self, pair_df, validation=False):
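        """Build warp inputs for a batch of (from, to) image pairs.

        For every pair this fills: per-mask affine transforms from the source
        pose to the target pose, the target pose masks, a fixed parameter
        block paired with those same masks, and the reverse (target-to-source)
        transforms with the source pose masks.
        """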
        batch = [np.empty([self._batch_size] + [self.num_mask, 8]),
                 np.empty([self._batch_size, self.num_mask] + list(self._image_size)),
                 np.empty([self._batch_size] + [self.num_mask, 8]),
                 np.empty([self._batch_size, self.num_mask] + list(self._image_size)),
                 np.empty([self._batch_size] + [self.num_mask, 8]),
                 np.empty([self._batch_size, self.num_mask] + list(self._image_size))]
        i = 0
        for _, p in pair_df.iterrows():
            fr = self._annotations_file.loc[p['from']]
            to = self._annotations_file.loc[p['to']]
            kp_array1 = pose_utils.load_pose_cords_from_strings(fr['keypoints_y'],
                                                                fr['keypoints_x'])
            kp_array2 = pose_utils.load_pose_cords_from_strings(to['keypoints_y'],
                                                                to['keypoints_x'])
            # Swap the image extension for .npy (the path is passed on to pose_masks).
            images_dir = self._images_dir_test if validation else self._images_dir_train
            npy_path_from = os.path.join(images_dir, p['from'])[:-3] + 'npy'
            npy_path_to = os.path.join(images_dir, p['to'])[:-3] + 'npy'
            batch[0][i] = pose_transform.affine_transforms(kp_array1, kp_array2, self._image_size, self.use_body_mask)
            batch[1][i] = pose_transform.pose_masks(kp_array2, self._image_size, self.use_body_mask, self.use_mask, npy_path_to, self.fat)
            # Fixed parameter block: one row [1, 0, 0, 0, 1, 0, 0, 0] per mask.
            batch[2][i] = np.c_[np.ones([10, 1]), np.zeros([10, 3]), np.ones([10, 1]), np.zeros([10, 3])]
            batch[3][i] = batch[1][i]
            # Reverse direction: warp from the target pose back to the source.
            batch[4][i] = pose_transform.affine_transforms(kp_array2, kp_array1, self._image_size, self.use_body_mask)
            batch[5][i] = pose_transform.pose_masks(kp_array1, self._image_size, self.use_body_mask, self.use_mask, npy_path_from, self.fat)

            i += 1
        return batch
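
The (10, 8) block written into batch[2] above holds one row of eight affine parameters per mask. Reading each row [1, 0, 0, 0, 1, 0, 0, 0] as the first eight entries of a flattened 3x3 transform (an assumption, not confirmed by the snippet) makes it an identity warp. A minimal NumPy sketch of that reading:

    import numpy as np

    # One row of 8 parameters per mask, exactly as built in the example above.
    identity_warps = np.c_[np.ones([10, 1]), np.zeros([10, 3]),
                           np.ones([10, 1]), np.zeros([10, 3])]
    print(identity_warps.shape)   # (10, 8)

    # Assumption: the 8 values are the first 8 entries of a flattened 3x3
    # transform whose last entry is fixed to 1; under that reading each row
    # is the identity matrix, i.e. a "no warp" transform.
    full = np.append(identity_warps[0], 1).reshape(3, 3)
    assert np.allclose(full, np.eye(3))
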
Example #2
 def compute_cord_warp_batch(self, pair_df):
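     """Build warp inputs for a batch of (from, to) image pairs.

     The returned arrays depend on self._warp_skip: per-mask affine
     transforms plus target pose masks ('mask'), a single uniform
     transform ('full'), or both poses flattened into one 72-element
     keypoint vector ('stn').
     """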
     if self._warp_skip == 'full':
         batch = [np.empty([self._batch_size] + [1, 8])]
     elif self._warp_skip == 'mask':
         batch = [np.empty([self._batch_size] + [10, 8]),
                  np.empty([self._batch_size, 10] + list(self._image_size))]
     else:
         batch = [np.empty([self._batch_size] + [72])]
     i = 0
     for _, p in pair_df.iterrows():
         fr = self._annotations_file.loc[p['from']]
         to = self._annotations_file.loc[p['to']]
         kp_array1 = pose_utils.load_pose_cords_from_strings(fr['keypoints_y'],
                                                             fr['keypoints_x'])
         kp_array2 = pose_utils.load_pose_cords_from_strings(to['keypoints_y'],
                                                             to['keypoints_x'])
         if self._warp_skip == 'mask':
             batch[0][i] = pose_transform.affine_transforms(kp_array1, kp_array2)
             batch[1][i] = pose_transform.pose_masks(kp_array2, self._image_size)
         elif self._warp_skip == 'full':
             batch[0][i] = pose_transform.estimate_uniform_transform(kp_array1, kp_array2)
         else:  # self._warp_skip == 'stn'
             batch[0][i][:36] = kp_array1.reshape((-1, ))
             batch[0][i][36:] = kp_array2.reshape((-1, ))
         i += 1
     return batch
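
In the final branch the two poses are packed into a single 72-element vector, which only works out if each pose has 18 keypoints with (y, x) coordinates (18 x 2 x 2 = 72). A small self-contained sketch of that packing, with made-up keypoint values:

    import numpy as np

    # Hypothetical poses: 18 keypoints, each a (y, x) pair, as implied by the
    # 36 + 36 = 72 layout used in the 'stn' branch above.
    kp_array1 = np.random.randint(0, 128, size=(18, 2))  # source pose
    kp_array2 = np.random.randint(0, 128, size=(18, 2))  # target pose

    stn_input = np.empty(72)
    stn_input[:36] = kp_array1.reshape(-1)   # first half: source keypoints
    stn_input[36:] = kp_array2.reshape(-1)   # second half: target keypoints

    # The packing is lossless: both poses can be recovered by reshaping.
    assert np.array_equal(stn_input[:36].reshape(18, 2), kp_array1)
    assert np.array_equal(stn_input[36:].reshape(18, 2), kp_array2)
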
Example #3
    # `img_pair`, `in_folder`, `out_folder`, `n` and `args` come from the enclosing script.
    m = re.match(r'([A-Za-z0-9_]*\.jpg)_([A-Za-z0-9_]*\.jpg)', img_pair)
    fr = m.groups()[0]
    to = m.groups()[1]

    gen_img = imread(os.path.join(in_folder, img_pair))
    # Drop the first two panels (each args.image_size[1] wide) of the stitched
    # output, keeping only the generated image.
    gen_img = gen_img[:, (2 * args.image_size[1]):]

    df = pd.read_csv(args.annotations_file_test, sep=':')
    ano_fr = df[df['name'] == fr].iloc[0]
    ano_to = df[df['name'] == to].iloc[0]
    kp_fr = pose_utils.load_pose_cords_from_strings(ano_fr['keypoints_y'],
                                                    ano_fr['keypoints_x'])
    kp_to = pose_utils.load_pose_cords_from_strings(ano_to['keypoints_y'],
                                                    ano_to['keypoints_x'])

    mask = pose_transform.pose_masks(kp_to,
                                     img_size=args.image_size).astype(bool)

    # `reduce` needs `from functools import reduce` on Python 3.
    # Merge the per-part masks into a single foreground mask.
    mask = np.array(reduce(np.logical_or, list(mask)))
    mask = mask.astype('float')

    pose_fr, _ = pose_utils.draw_pose_from_cords(kp_fr, args.image_size)
    pose_to, _ = pose_utils.draw_pose_from_cords(kp_to, args.image_size)

    cur_folder = os.path.join(out_folder, str(n))
    if not os.path.exists(cur_folder):
        os.makedirs(cur_folder)

    imsave(os.path.join(cur_folder, 'from.jpg'),
           imread(os.path.join(args.images_dir_test, fr)))
    imsave(os.path.join(cur_folder, 'to.jpg'),
           imread(os.path.join(args.images_dir_test, to)))
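
The reduce(np.logical_or, ...) step above collapses the stack of per-part masks into one foreground mask; the same result comes from a single any() over the part axis. A minimal sketch with hypothetical mask shapes:

    import numpy as np
    from functools import reduce  # required on Python 3

    # Hypothetical stack of per-part boolean masks (10 parts, 128x64 image).
    part_masks = np.zeros((10, 128, 64), dtype=bool)
    part_masks[0, 10:40, 20:44] = True    # e.g. a torso region
    part_masks[1, 40:80, 26:38] = True    # e.g. a limb region

    # Same merge as in the script above ...
    merged = np.array(reduce(np.logical_or, list(part_masks)))
    # ... which is equivalent to a vectorised any() over the part axis.
    assert np.array_equal(merged, part_masks.any(axis=0))

    merged = merged.astype('float')       # same dtype conversion as in the script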