    def __getitem__(self, index):
        item_info = self.train_set_info[index]
        img = Image.open(
            os.path.join(self.data_root, 'image',
                         item_info['image'])).convert('RGB')
        mask = Image.open(
            os.path.join(self.data_root, 'mask', item_info['mask']))

        if self.transform is not None:
            img = self.transform(img)
            mask = self.transform(mask)
        vns_dict = load_pickle_file(
            os.path.join(self.data_root, 'verts_normals',
                         item_info['model'] + '.pkl'))
        verts = vns_dict['verts']
        normals = vns_dict['normals']
        camera_dict = load_pickle_file(
            os.path.join(self.data_root, 'cameras',
                         item_info['model'] + '.pkl'))
        K = camera_dict['K']
        output = {
            'image': img,
            'mask': mask,
            'vertices': torch.from_numpy(verts).float(),
            'normals': torch.from_numpy(normals).float(),
            'translation':
            torch.tensor(item_info['pose']['translation']).float(),
            'rotation': torch.tensor(item_info['pose']['rotation']).float(),
            'K': torch.from_numpy(K).float()
        }

        return output
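Every example on this page calls a `load_pickle_file` helper that is defined elsewhere in the repository. A minimal sketch of what such a helper typically looks like, assuming it is a thin wrapper around `pickle.load`; the `latin1` encoding is a guess, suggested by the commented-out `torch.load(pretrain_model, encoding='latin1')` call in a later example:

import pickle

def load_pickle_file(pkl_path):
    """Deserialize a pickle file and return the stored object (often a dict)."""
    with open(pkl_path, 'rb') as f:
        data = pickle.load(f, encoding='latin1')  # 'latin1' helps read Python 2 pickles
    return data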
Example 2
    def _read_vids_info(self, file_path):
        vids_info = []
        with open(file_path, 'r') as reader:

            lines = []
            for line in reader:
                line = line.rstrip()
                lines.append(line)

            total = len(lines)
            for i, line in enumerate(lines):
                images_path = glob.glob(
                    os.path.join(self._vids_dir, line, '*.jpg'))
                images_path.sort()
                smpl_data = load_pickle_file(
                    os.path.join(self._smpls_dir, line, 'pose_shape.pkl'))
                kps_data = load_pickle_file(
                    os.path.join(self._smpls_dir, line, 'kps.pkl'))

                cams = smpl_data['cams']
                kps = (kps_data['kps'] + 1) / 2.0 * 1024

                assert len(images_path) == len(cams), '{} != {}'.format(
                    len(images_path), len(cams))

                info = {
                    'images': images_path,
                    'cams': cams,
                    'thetas': smpl_data['pose'],
                    'betas': smpl_data['shape'],
                    'j2ds': kps,
                    'length': len(images_path)
                }
                vids_info.append(info)
                self._dataset_size += info['length'] // self._intervals
                # self._dataset_size += info['length']
                self._num_videos += 1
                print('loading video = {}, {} / {}'.format(line, i, total))

                if self._opt.debug:
                    if i > 1:
                        break

        return vids_info
Example 3
    def _read_pair_list(self, im_dir, pair_pkl_path):
        pair_list = load_pickle_file(pair_pkl_path)
        new_pair_list = []

        for pairs in pair_list:
            src_path = os.path.join(im_dir, pairs[0])
            dst_path = os.path.join(im_dir, pairs[1])

            new_pair_list.append((src_path, dst_path))

        return new_pair_list
Example 4
    def __init__(self, pkl_path, rotate=False):
        """
        pkl_path is the path to a SMPL model
        """
        super(SMPL, self).__init__()
        self.rotate = rotate

        # -- Load SMPL params --
        dd = load_pickle_file(pkl_path)

        # define faces
        # self.register_buffer('faces', torch.from_numpy(undo_chumpy(dd['f']).astype(np.int32)).type(dtype=torch.int32))
        self.faces = torch.from_numpy(dd['f'].astype(
            np.int32)).type(dtype=torch.int32)

        # Mean template vertices
        self.register_buffer('v_template', torch.FloatTensor(dd['v_template']))
        # Size of mesh [Number of vertices, 3], (6890, 3)
        self.size = [self.v_template.shape[0], 3]
        self.num_betas = dd['shapedirs'].shape[-1]
        # Shape blend shape basis (shapedirs): (6890, 3, 10)
        # reshaped to (6890*3, 10), transposed to (10, 6890*3)
        self.register_buffer(
            'shapedirs',
            torch.FloatTensor(
                np.reshape(dd['shapedirs'], [-1, self.num_betas]).T))

        # Regressor for joint locations given shape -> (24, 6890)
        # Transpose to shape (6890, 24)
        self.register_buffer(
            'J_regressor',
            torch.FloatTensor(np.asarray(dd['J_regressor'].T.todense())))

        # Pose blend shape basis: (6890, 3, 207)
        num_pose_basis = dd['posedirs'].shape[-1]

        # Pose blend shape basis is reshaped to (6890*3, 207)
        # posedirs is transposed into (207, 6890*3)
        self.register_buffer(
            'posedirs',
            torch.FloatTensor(
                np.reshape(dd['posedirs'], [-1, num_pose_basis]).T))

        # indices of parents for each joint
        self.parents = np.array(dd['kintree_table'][0].astype(np.int32))

        # LBS weights (6890, 24)
        self.register_buffer('weights', torch.FloatTensor(dd['weights']))

        # This returns 19 keypoints: 6890 x 19
        joint_regressor = torch.FloatTensor(
            np.asarray(dd['cocoplus_regressor'].T.todense()))

        self.register_buffer('joint_regressor', joint_regressor)
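A hypothetical usage sketch for the module above; the model path is a placeholder, and the printed values simply restate the shapes documented in the comments:

# Hypothetical usage; 'assets/smpl_model.pkl' is a placeholder path, not a repo file.
smpl = SMPL('assets/smpl_model.pkl')
print(smpl.size)         # [6890, 3], number of template vertices and coordinates
print(smpl.num_betas)    # 10 for the standard SMPL shape basis
print(smpl.faces.dtype)  # torch.int32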
Example 5
    def __init__(self, imgs_path, pose_shape_pkl_path, image_size=256):
        imgs_fn_list = os.listdir(imgs_path)
        imgs_fn_list.sort()
        self.imgs_path_list = [
            os.path.join(imgs_path, img_fn) for img_fn in imgs_fn_list
        ]
        self.pose_shape_pkl = load_pickle_file(pose_shape_pkl_path)
        self.Resize = transforms.Resize(image_size)
        self.ToTensor = transforms.ToTensor()

        if len(self.imgs_path_list) != self.pose_shape_pkl['pose'].shape[0]:
            print('images: ', len(self.imgs_path_list))
            print('smpls: ', self.pose_shape_pkl['pose'].shape[0])
Example 6
    def load_senet_model(self, pretrain_model):
        # saved_data = torch.load(pretrain_model, encoding='latin1')
        from utils.util import load_pickle_file
        saved_data = load_pickle_file(pretrain_model)
        save_weights_dict = dict()

        for key, val in saved_data.items():
            if key.startswith('fc'):
                continue
            save_weights_dict[key] = torch.from_numpy(val)

        self.net.load_state_dict(save_weights_dict)

        print('loaded face model from {}'.format(pretrain_model))
Example 7
    def load_sample(self, im_pairs, pkl_path):
        # 1. load images
        imgs = self.load_images(im_pairs)
        # 2. load pickle data
        pkl_data = load_pickle_file(pkl_path)
        src_fim = pkl_data['from_face_index_map'][:, :, 0]  # (img_size, img_size)
        dst_fim = pkl_data['to_face_index_map'][:, :, 0]  # (img_size, img_size)
        T = pkl_data['T']  # (img_size, img_size, 2)
        fims = np.stack([src_fim, dst_fim], axis=0)

        fims_enc = self.map_fn[fims]  # (2, h, w, c)
        fims_enc = np.transpose(fims_enc, axes=(0, 3, 1, 2))  # (2, c, h, w)

        sample = {
            'images': torch.tensor(imgs).float(),
            'src_fim': torch.tensor(src_fim).float(),
            'tsf_fim': torch.tensor(dst_fim).float(),
            'fims': torch.tensor(fims_enc).float(),
            'T': torch.tensor(T).float(),
            'j2d': torch.tensor(pkl_data['j2d']).float()
        }

        if 'warp' in pkl_data:
            if len(pkl_data['warp'].shape) == 4:
                sample['warp'] = torch.tensor(pkl_data['warp'][0],
                                              dtype=torch.float32)
            else:
                sample['warp'] = torch.tensor(pkl_data['warp'],
                                              dtype=torch.float32)
        elif 'warp_R' in pkl_data:
            sample['warp'] = torch.tensor(pkl_data['warp_R'][0],
                                          dtype=torch.float32)
        elif 'warp_T' in pkl_data:
            sample['warp'] = torch.tensor(pkl_data['warp_T'][0],
                                          dtype=torch.float32)

        if 'T_cycle' in pkl_data:
            sample['T_cycle'] = torch.tensor(pkl_data['T_cycle']).float()

        if 'T_cycle_vis' in pkl_data:
            sample['T_cycle_vis'] = torch.tensor(
                pkl_data['T_cycle_vis']).float()

        return sample
Example 8
    def load_sample(self, im_pairs, pkl_path):
        # 1. load images
        imgs = self.load_images(im_pairs)
        # 2. load pickle data
        pkl_data = load_pickle_file(pkl_path)
        src_fim = pkl_data['from_face_index_map'][:, :, 0]  # (img_size, img_size)
        dst_fim = pkl_data['to_face_index_map'][:, :, 0]  # (img_size, img_size)
        T = pkl_data['T']  # (img_size, img_size, 2)
        fims = np.stack([src_fim, dst_fim], axis=0)

        fims_enc = self.map_fn[fims]  # (2, h, w, c)
        fims_enc = np.transpose(fims_enc, axes=(0, 3, 1, 2))  # (2, c, h, w)

        heads_mask = self.head_fn[fims[1:]]  # (1, h, w, 1)
        heads_mask = np.transpose(heads_mask, axes=(0, 3, 1, 2))  # (1, 1, h, w)
        head_bbox, _ = cal_mask_bbox(heads_mask, factor=1.05)
        body_bbox, _ = cal_mask_bbox(1 - fims_enc[1:, -1:], factor=1.2)

        # print(head_bbox.shape, valid_bbox.shape)
        sample = {
            'images': torch.tensor(imgs).float(),
            'fims': torch.tensor(fims_enc).float(),
            'T': torch.tensor(T).float(),
            'head_bbox': torch.tensor(head_bbox[0]).long(),
            'body_bbox': torch.tensor(body_bbox[0]).long()
        }

        if 'warp' in pkl_data:
            if len(pkl_data['warp'].shape) == 4:
                sample['warp'] = torch.tensor(pkl_data['warp'][0],
                                              dtype=torch.float32)
            else:
                sample['warp'] = torch.tensor(pkl_data['warp'],
                                              dtype=torch.float32)
        elif 'warp_R' in pkl_data:
            sample['warp'] = torch.tensor(pkl_data['warp_R'][0],
                                          dtype=torch.float32)
        elif 'warp_T' in pkl_data:
            sample['warp'] = torch.tensor(pkl_data['warp_T'][0],
                                          dtype=torch.float32)

        return sample
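The `self.map_fn[fims]` lookup used in both `load_sample` variants above is plain NumPy fancy indexing: every face index in the index map selects one row of a per-face encoding table, and a negative index (commonly used for background pixels) simply selects the last row. A self-contained toy illustration of that pattern; the face count, channel count and image size are made-up values, not the repository's:

import numpy as np

num_faces, c, img_size = 13776, 3, 256        # illustrative sizes only
map_fn = np.random.rand(num_faces + 1, c)     # one encoding row per face id (+1 for background)
fims = np.random.randint(-1, num_faces, size=(2, img_size, img_size))  # face index maps

fims_enc = map_fn[fims]                                # fancy indexing -> (2, 256, 256, c)
fims_enc = np.transpose(fims_enc, axes=(0, 3, 1, 2))   # -> (2, c, 256, 256)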
Example 9
    def _read_samples_info(self, im_dir, pkl_dir, pair_ids_filepath):
        """
        Args:
            im_dir:
            pkl_dir:
            pair_ids_filepath:

        Returns:

        """
        # 1. load image pair list
        self.im_pair_list = load_pickle_file(pair_ids_filepath)

        # 2. load pkl file paths
        self.all_pkl_paths = sorted(glob.glob(os.path.join(pkl_dir, '*.pkl')))

        assert len(self.im_pair_list) == len(self.all_pkl_paths), '{} != {}'.format(
            len(self.im_pair_list), len(self.all_pkl_paths)
        )
        self._dataset_size = len(self.im_pair_list)
Example 10
    def _read_samples_info(self, im_dir, pkl_dir, pair_ids_filepath):
        """
        Args:
            im_dir:
            pkl_dir:
            pair_ids_filepath:

        Returns:

        """
        # 1. load image pair list
        im_pair_list = load_pickle_file(pair_ids_filepath)

        # 2. load pkl file paths
        all_pkl_paths = sorted(glob.glob(os.path.join(pkl_dir, '*.pkl')))

        # 3. filter out pairs whose source image is not a front view
        self.im_pair_list = []
        self.all_pkl_paths = []

        for pairs, pkl_path in zip(im_pair_list, all_pkl_paths):
            src_path = os.path.join(im_dir, pairs[0])
            dst_path = os.path.join(im_dir, pairs[1])

            if 'side' in src_path or 'back' in src_path:
                continue

            self.im_pair_list.append((src_path, dst_path))
            self.all_pkl_paths.append(pkl_path)

        assert len(self.im_pair_list) == len(
            self.all_pkl_paths), '{} != {}'.format(len(self.im_pair_list),
                                                   len(self.all_pkl_paths))
        self._dataset_size = len(self.im_pair_list)

        del im_pair_list
        del all_pkl_paths
Example 11
    def __init__(self,
                 feat_dim,
                 hid_dim,
                 out_dim,
                 deformed=0.1,
                 adj_mat_pkl_path='assets/adj_mat_info.pkl'):
        super(MeshDeformation, self).__init__()
        self.deformed = deformed

        adj_mat_info = load_pickle_file(adj_mat_pkl_path)
        adj_mat = torch_sparse_tensor(adj_mat_info['indices'],
                                      adj_mat_info['value'],
                                      adj_mat_info['size'])

        self.deformation = GBottleneck(2,
                                       feat_dim,
                                       hid_dim,
                                       out_dim,
                                       adj_mat,
                                       activation='relu')

        self.to_verts = nn.Tanh()
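The `torch_sparse_tensor` helper used above is not shown on this page. A minimal sketch, assuming `adj_mat_info.pkl` stores COO-style indices, values and the dense shape under the keys used in the call; the exact dtypes are assumptions:

import torch

def torch_sparse_tensor(indices, value, size):
    """Build a sparse COO tensor from (2, nnz) indices, nnz values and a dense shape."""
    indices = torch.as_tensor(indices, dtype=torch.long)
    value = torch.as_tensor(value, dtype=torch.float32)
    return torch.sparse_coo_tensor(indices, value, torch.Size(size))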
Example 12
    def __init__(self,
                 pkl_path,
                 hresMapping_pkl_path='assets/hresMapping.pkl',
                 rotate=False,
                 isHres=False):
        """
        pkl_path is the path to a SMPL model
        """
        super(SMPL, self).__init__()
        self.rotate = rotate
        self.isHres = isHres

        # -- Load SMPL params --
        dd = load_pickle_file(pkl_path)

        # define faces
        # self.register_buffer('faces', torch.from_numpy(undo_chumpy(dd['f']).astype(np.int32)).type(dtype=torch.int32))
        self.faces = torch.from_numpy(dd['f'].astype(
            np.int32)).type(dtype=torch.int32)

        # Mean template vertices
        self.register_buffer('v_template', torch.FloatTensor(dd['v_template']))
        # Size of mesh [Number of vertices, 3], (6890, 3)
        self.size = [self.v_template.shape[0], 3]
        self.num_betas = dd['shapedirs'].shape[-1]
        # Shape blend shape basis (shapedirs): (6890, 3, 10)
        # reshaped to (6890*3, 10), transposed to (10, 6890*3)
        self.register_buffer(
            'shapedirs',
            torch.FloatTensor(
                np.reshape(dd['shapedirs'], [-1, self.num_betas]).T))

        # Regressor for joint locations given shape -> (24, 6890)
        # Transpose to shape (6890, 24)
        self.register_buffer(
            'J_regressor',
            torch.FloatTensor(np.asarray(dd['J_regressor'].T.todense())))

        # Pose blend shape basis: (6890, 3, 207)
        num_pose_basis = dd['posedirs'].shape[-1]

        # Pose blend shape basis is reshaped to (6890*3, 207)
        # posedirs is transposed into (207, 6890*3)
        self.register_buffer(
            'posedirs',
            torch.FloatTensor(
                np.reshape(dd['posedirs'], [-1, num_pose_basis]).T))

        # indices of parents for each joint
        self.parents = np.array(dd['kintree_table'][0].astype(np.int32))

        # LBS weights (6890, 24)
        self.register_buffer('weights', torch.FloatTensor(dd['weights']))

        # This returns 19 keypoints: 6890 x 19
        joint_regressor = torch.FloatTensor(
            np.asarray(dd['cocoplus_regressor'].T.todense()))

        self.register_buffer('joint_regressor', joint_regressor)

        mapping, nf = load_pickle_file(hresMapping_pkl_path)

        self.faces_hres = torch.from_numpy(nf.astype(
            np.int32)).type(dtype=torch.int32)

        weights_hres = torch.FloatTensor(
            np.hstack([
                np.expand_dims(
                    np.mean(
                        mapping.dot(
                            np.repeat(np.expand_dims(dd['weights'][:, i], -1), 3)
                        ).reshape(-1, 3),
                        axis=1
                    ),
                    axis=-1
                )
                for i in range(24)
            ]))
        self.register_buffer('weights_hres', weights_hres)

        mapping = coo_matrix_to_torch_sparse_tensor(mapping.tocoo())
        self.register_buffer('mapping', mapping)
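`coo_matrix_to_torch_sparse_tensor` is likewise defined elsewhere in the repository. A minimal sketch, assuming it converts a `scipy.sparse` COO matrix into a torch sparse float tensor of the same shape:

import numpy as np
import torch

def coo_matrix_to_torch_sparse_tensor(coo):
    """Convert a scipy.sparse COO matrix into a torch sparse float tensor."""
    indices = torch.from_numpy(np.vstack((coo.row, coo.col)).astype(np.int64))
    values = torch.from_numpy(coo.data.astype(np.float32))
    return torch.sparse_coo_tensor(indices, values, torch.Size(coo.shape))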