Example #1
    def __init__(self,
                 data_root,
                 data_split='train',
                 hand_side='right',
                 njoints=21,
                 use_cache=True,
                 visual=False):
        if not os.path.exists(data_root):
            raise ValueError("data_root: %s not exist" % data_root)
        self.name = 'stb'
        self.data_split = data_split
        self.hand_side = hand_side
        self.img_paths = []
        self.dep_paths = []
        self.joints = []
        self.kp2ds = []
        self.centers = []
        self.my_scales = []
        self.njoints = njoints  # total 21 hand parts
        self.visual = visual

        self.root_id = snap_joint_name2id['loc_bn_palm_L']
        self.mid_mcp_id = snap_joint_name2id['loc_bn_mid_L_01']
        ann_base = os.path.join(data_root, "labels")
        img_base = os.path.join(data_root, "images")
        sk_rot = sk_rot_mx(sk_rot_vec)

        self.sk_intr = np.array([
            [sk_fx_color, 0.0, sk_tx_color],
            [0.0, sk_fy_color, sk_ty_color],
            [0.0, 0.0, 1.0],
        ], dtype=np.float32)  # (3, 3)

        self.sequence = []
        if data_split == 'train':
            self.sequence = [
                "B2Counting", "B2Random", "B3Counting", "B3Random",
                "B4Counting", "B4Random", "B5Counting", "B5Random",
                "B6Counting", "B6Random"
            ]
        elif data_split == 'test':
            self.sequence = ["B1Counting", "B1Random"]
        elif data_split == 'val':
            self.sequence = ["B2Counting", "B2Random"]
        elif data_split == "train_val":
            self.sequence = [
                "B3Counting", "B3Random", "B4Counting", "B4Random",
                "B5Counting", "B5Random", "B6Counting", "B6Random"
            ]
        elif data_split == "all":
            self.sequence = [
                "B1Counting", "B1Random", "B2Counting", "B2Random",
                "B3Counting", "B3Random", "B4Counting", "B4Random",
                "B5Counting", "B5Random", "B6Counting", "B6Random"
            ]
        else:
            raise ValueError("split {} not in [train|test|val|train_val|all]")

        self.cache_folder = os.path.join(CACHE_HOME,
                                         "my-{}".format(data_split), "stb")
        os.makedirs(self.cache_folder, exist_ok=True)
        cache_path = os.path.join(self.cache_folder,
                                  "{}.pkl".format(self.data_split))
        if os.path.exists(cache_path) and use_cache:
            with open(cache_path, "rb") as fid:
                annotations = pickle.load(fid)
                self.img_paths = annotations["img_paths"]
                self.dep_paths = annotations["dep_paths"]
                self.joints = annotations["joints"]
                self.kp2ds = annotations["kp2ds"]
                self.centers = annotations["centers"]
                self.my_scales = annotations["my_scales"]
            print("stb {} gt loaded from {}".format(self.data_split,
                                                    cache_path))
            return

        self.imgpath_list = [
            os.path.join(img_base, seq) for seq in self.sequence
        ]

        imgsk_prefix = "SK_color"
        depsk_prefix = "SK_depth_seg"

        annsk_list = [
            os.path.join(ann_base, "{}_{}.mat".format(seq, imgsk_prefix[:2]))
            for seq in self.sequence
        ]

        self.ann_list = annsk_list

        for imgpath, ann in zip(self.imgpath_list, self.ann_list):
            ''' we only use SK image '''
            assert "SK" in ann
            ''' 1. load joint '''
            rawmat = sio.loadmat(ann)
            rawjoint = rawmat["handPara"].transpose((2, 1, 0))  # N x K x 3
            num = rawjoint.shape[0]  # N

            rawjoint = sk_xyz_depth2color(rawjoint, sk_trans_vec, sk_rot)
            # reorder joints into the snap joint ordering
            joint = rawjoint[:, stb_to_snap_id, :]
            # millimeters -> meters
            joint = joint / 1000.0
            # move the root joint from the palm to the wrist
            # joint = _stb_palm2wrist(joint)  # N x K x 3 (alternative)
            joint = ge_palm2wrist(joint)  # N x K x 3
            self.joints.append(joint)
            ''' 2. load image paths '''
            for idx in range(joint.shape[0]):
                self.img_paths.append(
                    os.path.join(imgpath,
                                 "{}_{}.png".format(imgsk_prefix, idx)))
                self.dep_paths.append(
                    os.path.join(imgpath,
                                 "{}_{}.png".format(depsk_prefix, idx)))

        self.joints = np.concatenate(self.joints,
                                     axis=0).astype(np.float32)  # (30000, 21, 3)

        for i in tqdm(range(len(self.img_paths))):
            joint = self.joints[i]
            kp2d_homo = self.sk_intr.dot(joint.T).T
            kp2d = kp2d_homo / kp2d_homo[:, 2:3]
            kp2d = kp2d[:, :2]
            center = handutils.get_annot_center(kp2d)

            # calculate my_scale
            dep = Image.open(self.dep_paths[i]).convert("RGB")
            rel_dep = self.real_dep_img(dep)
            mask_rel_dep = np.argwhere(rel_dep > 1e-6)
            # my_scale = handutils.get_ori_crop_scale(mask_rel_dep, side=0, kp2d=kp2d)  # original: bbox from the depth mask
            my_scale = handutils.get_ori_crop_scale(
                mask_rel_dep, side=0, kp2d=kp2d,
                mask_flag=False)  # bbox from kp2d only; barely differs from the mask version, marginally better
            my_scale = (np.atleast_1d(my_scale))[np.newaxis, :]
            self.my_scales.append(my_scale)

            self.kp2ds.append(kp2d[np.newaxis, :, :])
            self.centers.append(center[np.newaxis, :])
            # self.scales.append((np.atleast_1d(scale))[np.newaxis, :])

        self.kp2ds = np.concatenate(self.kp2ds,
                                    axis=0).astype(np.float32)  # (N, 21, 2)
        self.centers = np.concatenate(self.centers,
                                      axis=0).astype(np.float32)  # (N, 2)
        # self.scales = np.concatenate(self.scales, axis=0).astype(np.float32)  # (N, 1)
        self.my_scales = np.concatenate(self.my_scales,
                                        axis=0).astype(np.float32)  # (N, 1)
        if use_cache:
            full_info = {
                "img_paths": self.img_paths,
                "dep_paths": self.dep_paths,
                "joints": self.joints,
                "kp2ds": self.kp2ds,
                "centers": self.centers,
                # "scales": self.scales,
                "my_scales": self.my_scales,
            }
            with open(cache_path, "wb") as fid:
                pickle.dump(full_info, fid)
                print("Wrote cache for dataset stb {} to {}".format(
                    self.data_split, cache_path))
        return
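The 2D keypoints here come from projecting the color-frame 3D joints through the SK color intrinsics (the sk_intr.dot(joint.T) step above). A minimal self-contained sketch of that pinhole projection; the focal lengths and principal point below are placeholders, not the actual STB calibration constants:

import numpy as np

# Placeholder intrinsics; the real values (sk_fx_color, sk_fy_color, ...)
# come from the STB calibration constants used in the constructor above.
K = np.array([[607.9, 0.0, 314.8],
              [0.0, 607.0, 236.4],
              [0.0, 0.0, 1.0]], dtype=np.float32)

joints = np.random.rand(21, 3).astype(np.float32)  # fake (21, 3) joints
joints[:, 2] += 0.5                                # keep depth positive (m)

homo = K.dot(joints.T).T           # (21, 3) homogeneous image coordinates
kp2d = homo[:, :2] / homo[:, 2:3]  # perspective divide -> (21, 2) pixels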
Example #2
    def __init__(self,
                 data_root,
                 data_split='test',
                 hand_side='right',
                 njoints=21,
                 use_cache=True,
                 vis=False):
        if not os.path.exists(data_root):
            raise ValueError("data_root: %s not exist" % data_root)

        self.name = 'do'
        self.data_root = data_root
        self.data_split = data_split
        self.hand_side = hand_side
        self.clr_paths = []
        self.dep_paths = []
        self.mask_paths = []
        self.joints = []
        self.anno_2d_depth = []
        self.centers = []
        self.my_scales = []
        self.sides = []
        self.intrs = []
        self.njoints = njoints
        self.reslu = [480, 640]
        self.vis = vis

        self.image_size = 128

        if data_split == 'test':
            self.sequence = [
                'Grasp1', 'Grasp2', 'Occlusion', 'Rigid', 'Pinch', 'Rotate'
            ]
        else:
            print("DexterObjectDataset is only for evaluation; "
                  "there is no train split!")
            return

        # self.bboxes = pd.read_csv(
        #     os.path.join(data_root, 'bbox_dexter+object.csv'))

        # color camera intrinsics (3x3) and extrinsics (3x4)
        color_intrinsics = np.array([[587.45209, 0, 325],
                                     [0, 600.67456, 249],
                                     [0, 0, 1]])

        color_extrinsics = np.array([[0.9999, 0.0034, 0.0161, 19.0473],
                                     [-0.0033, 1.0000, -0.0079, -1.8514],
                                     [-0.0162, 0.0079, 0.9998, -4.7501]])

        self.depth_intrisics = np.array([[224.502, 0, 160],
                                         [0, 230.494, 120],
                                         [0, 0, 1]])

        # pixel-index grids for the 240x320 depth maps:
        # xmap[v, u] = v (row index), ymap[v, u] = u (column index)
        self.xmap, self.ymap = np.meshgrid(np.arange(240),
                                           np.arange(320),
                                           indexing='ij')

        # 3x4 matrix combining the color intrinsics and extrinsics
        self.M_color = np.matmul(color_intrinsics, color_extrinsics)
        self.DO_PRED_2D = np.load(
            os.path.join(self.data_root, "DO_pred_2d.npy"))

        self.cache_folder = os.path.join(CACHE_HOME, "my-test",
                                         "DexterObjectDataset")
        os.makedirs(self.cache_folder, exist_ok=True)
        cache_path = os.path.join(self.cache_folder,
                                  "{}.pkl".format(self.data_split))

        if os.path.exists(cache_path) and use_cache:
            with open(cache_path, "rb") as fid:
                annotations = pickle.load(fid)
                self.clr_paths = annotations["clr_paths"]
                self.dep_paths = annotations["dep_paths"]
                self.anno_2d_depth = annotations["2d_depth"]
                self.joints = annotations["joints"]
                self.centers = annotations["centers"]
                self.my_scales = annotations["my_scales"]
            print("DexterObjectDataset {} gt loaded from {}".format(
                self.data_split, cache_path))
            return

        print("init DexterObjectDataset {}, It may take a while at first time".
              format(data_split))

        for fd in self.sequence:
            clr_fd_path = os.path.join(self.data_root, 'data', fd, 'color')
            clr_files = os.listdir(clr_fd_path)
            clr_files = [os.path.join(fd, 'color', x) for x in clr_files]
            clr_files = np.sort(clr_files)
            self.clr_paths.extend(clr_files)

            dep_fd_path = os.path.join(self.data_root, 'data', fd, 'depth')
            dep_files = os.listdir(dep_fd_path)
            dep_files = [os.path.join(fd, 'depth', x) for x in dep_files]
            dep_files = np.sort(dep_files)
            self.dep_paths.extend(dep_files)

            fn_anno_2d = os.path.join(self.data_root, 'data', fd,
                                      'annotations', fd + '2D.txt')
            df_anno_2d = pd.read_table(fn_anno_2d, sep=';', header=None)
            cols = [0, 1, 2, 3, 4]
            df_anno_2d = df_anno_2d[cols]
            for col in cols:
                new_cols_2d = df_anno_2d[col].str.replace(' ', '').str.split(
                    ',', expand=True)
                df_anno_2d[[str(col) + '_u', str(col) + '_v']] = new_cols_2d
            df_anno_2d = df_anno_2d[df_anno_2d.columns[5:]]
            df_anno_2d = np.array(df_anno_2d, dtype='float32').reshape(
                [df_anno_2d.shape[0], -1, 2])
            self.anno_2d_depth.extend(df_anno_2d)

            fn_anno_3d = os.path.join(self.data_root, 'data', fd,
                                      'annotations', 'my_' + fd + '3D.txt')
            df_anno_3d = pd.read_table(fn_anno_3d, sep=';', header=None)
            cols = [0, 1, 2, 3, 4]
            df_anno_3d = df_anno_3d[cols]

            for col in cols:
                new_cols_3d = df_anno_3d[col].str.replace(' ', '').str.split(
                    ',', expand=True)
                df_anno_3d[[str(col) + '_x',
                            str(col) + '_y',
                            str(col) + '_z']] = new_cols_3d
            df_anno_3d = df_anno_3d[df_anno_3d.columns[5:]]
            df_anno_3d = np.array(df_anno_3d, dtype='float32').reshape(
                [df_anno_3d.shape[0], -1, 3])  # N*5*3
            self.joints.extend(df_anno_3d)

        self.joints = np.array(self.joints)
        for i in range(len(self.joints)):
            # 32001 marks a missing depth annotation; replace it with NaN
            b = np.where(self.joints[i][:, 2:].squeeze() == 32001)
            self.joints[i][b] = np.array([np.nan, np.nan, np.nan])

            center = handutils.get_annot_center(self.DO_PRED_2D[i])
            my_scale = handutils.get_ori_crop_scale(mask=False,
                                                    mask_flag=False,
                                                    side=None,
                                                    kp2d=self.DO_PRED_2D[i])

            center = center[np.newaxis, :]
            self.centers.append(center)

            my_scale = (np.atleast_1d(my_scale))[np.newaxis, :]
            self.my_scales.append(my_scale)

        self.joints = self.joints / 1000.0  # millimeters -> meters
        self.joints = self.joints.tolist()

        self.centers = np.concatenate(self.centers,
                                      axis=0).astype(np.float32)  # (N, 2)
        self.my_scales = np.concatenate(self.my_scales,
                                        axis=0).astype(np.float32)  # (N, 1)

        if use_cache:
            full_info = {
                "clr_paths": self.clr_paths,
                "dep_paths": self.dep_paths,
                "joints": self.joints,
                "2d_depth": self.anno_2d_depth,
                "centers": self.centers,
                "my_scales": self.my_scales,
            }
            with open(cache_path, "wb") as fid:
                pickle.dump(full_info, fid)
                print("Wrote cache for dataset DexterObjectDataset {} to {}".
                      format(self.data_split, cache_path))
        return
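The depth intrinsics and the xmap/ymap index grids stored by this constructor are the ingredients for back-projecting the 240x320 depth maps into camera space, though the downstream use is not shown in this excerpt. A hedged sketch, assuming depth values in millimeters:

import numpy as np

# Depth intrinsics from the constructor above; depth units assumed to be mm.
fx, fy, cx, cy = 224.502, 230.494, 160.0, 120.0
depth = np.full((240, 320), 500.0)  # placeholder depth map (mm)

# Same index grids as self.xmap / self.ymap:
# xmap[v, u] = v (row index), ymap[v, u] = u (column index)
xmap, ymap = np.meshgrid(np.arange(240), np.arange(320), indexing='ij')

z = depth / 1000.0            # mm -> m
x = (ymap - cx) * z / fx      # the column index is the image x axis
y = (xmap - cy) * z / fy
points = np.stack([x, y, z], axis=-1)  # (240, 320, 3) camera-space points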
Example #3
    def __init__(self,
                 data_root="/disk1/data/RHD/RHD_published_v2",
                 data_split='train',
                 hand_side='right',
                 njoints=21,
                 use_cache=True,
                 visual=False):

        if not os.path.exists(data_root):
            raise ValueError("data_root: %s not exist" % data_root)

        self.name = 'rhd'
        self.data_split = data_split
        self.hand_side = hand_side
        self.clr_paths = []
        self.mask_paths = []
        self.joints = []
        self.kp2ds = []
        self.centers = []
        self.my_scales = []
        self.sides = []
        self.intrs = []
        self.njoints = njoints  # total 21 hand parts
        self.reslu = [320, 320]

        self.visual = visual

        self.root_id = snap_joint_name2id['loc_bn_palm_L']  # 0
        self.mid_mcp_id = snap_joint_name2id['loc_bn_mid_L_01']  # 9

        # [train|test|val|train_val|all]
        if data_split == 'train':
            self.sequence = [
                'training',
            ]
        elif data_split == 'test':
            self.sequence = [
                'evaluation',
            ]
        elif data_split == 'val':
            self.sequence = [
                'evaluation',
            ]
        elif data_split == 'train_val':
            self.sequence = [
                'training',
            ]
        elif data_split == 'all':
            self.sequence = ['training', 'evaluation']
        else:
            raise ValueError(
                "split {} not in [train|test|val|train_val|all]".format(
                    data_split))

        self.cache_folder = os.path.join(CACHE_HOME,
                                         "my-{}".format(data_split), "rhd")
        os.makedirs(self.cache_folder, exist_ok=True)
        cache_path = os.path.join(self.cache_folder,
                                  "{}.pkl".format(self.data_split))
        if os.path.exists(cache_path) and use_cache:
            with open(cache_path, "rb") as fid:
                annotations = pickle.load(fid)
                self.sides = annotations["sides"]
                self.clr_paths = annotations["clr_paths"]
                self.mask_paths = annotations["mask_paths"]
                self.joints = annotations["joints"]
                self.kp2ds = annotations["kp2ds"]
                self.intrs = annotations["intrs"]
                self.centers = annotations["centers"]
                self.my_scales = annotations["my_scales"]
            print("rhd {} gt loaded from {}".format(self.data_split,
                                                    cache_path))
            return

        datapath_list = [os.path.join(data_root, seq) for seq in self.sequence]
        annoname_list = ["anno_{}.pickle".format(seq) for seq in self.sequence]
        anno_list = [
            os.path.join(datapath, annoname) \
            for datapath, annoname in zip(datapath_list, annoname_list)
        ]
        clr_root_list = [
            os.path.join(datapath, "color") for datapath in datapath_list
        ]
        dep_root_list = [
            os.path.join(datapath, "depth") for datapath in datapath_list
        ]
        mask_root_list = [
            os.path.join(datapath, "mask") for datapath in datapath_list
        ]

        print("init RHD {}, It will take a while at first time".format(
            data_split))
        for anno, clr_root, dep_root, mask_root in zip(
                anno_list, clr_root_list, dep_root_list, mask_root_list):

            with open(anno, 'rb') as fi:
                rawdatas = pickle.load(fi)

            for i in tqdm(range(len(rawdatas))):

                raw = rawdatas[i]
                rawkp2d = raw['uv_vis'][:, :2]  # kp 2d left & right hand
                rawvis = raw['uv_vis'][:, 2]

                rawjoint = raw['xyz']  # keypoint xyz coordinates, in meters
                rawintr = raw['K']
                ''' "both" means left, right'''
                kp2dboth = [
                    rawkp2d[:21][rhd_to_snap_id, :],
                    rawkp2d[21:][rhd_to_snap_id, :]
                ]
                visboth = [
                    rawvis[:21][rhd_to_snap_id], rawvis[21:][rhd_to_snap_id]
                ]
                jointboth = [
                    rawjoint[:21][rhd_to_snap_id, :],
                    rawjoint[21:][rhd_to_snap_id, :]
                ]

                intrboth = [rawintr, rawintr]
                sideboth = ['l', 'r']

                l_kp_count = np.sum(raw['uv_vis'][:21, 2] == 1)
                r_kp_count = np.sum(raw['uv_vis'][21:, 2] == 1)
                vis_side = 'l' if l_kp_count > r_kp_count else 'r'

                for kp2d, vis, joint, side, intr \
                        in zip(kp2dboth, visboth, jointboth, sideboth, intrboth):
                    if side != vis_side:
                        continue

                    clrpth = os.path.join(clr_root, '%.5d.png' % i)
                    maskpth = os.path.join(mask_root, '%.5d.png' % i)
                    self.clr_paths.append(clrpth)
                    self.mask_paths.append(maskpth)
                    self.sides.append(side)

                    joint = joint[np.newaxis, :, :]
                    self.joints.append(joint)

                    center = handutils.get_annot_center(kp2d)
                    kp2d = kp2d[np.newaxis, :, :]
                    self.kp2ds.append(kp2d)

                    center = center[np.newaxis, :]
                    self.centers.append(center)

                    mask = Image.open(maskpth).convert("RGB")
                    mask = np.array(mask)[:, :, 2:]
                    my_scale = handutils.get_ori_crop_scale(
                        mask, side, kp2d.squeeze(0))
                    my_scale = (np.atleast_1d(my_scale))[np.newaxis, :]
                    self.my_scales.append(my_scale)

                    intr = intr[np.newaxis, :]
                    self.intrs.append(intr)

        self.joints = np.concatenate(self.joints,
                                     axis=0).astype(np.float32)  # (N, 21, 3)

        self.kp2ds = np.concatenate(self.kp2ds,
                                    axis=0).astype(np.float32)  # (N, 21, 2)
        self.centers = np.concatenate(self.centers,
                                      axis=0).astype(np.float32)  # (N, 2)
        self.my_scales = np.concatenate(self.my_scales,
                                        axis=0).astype(np.float32)  # (N, 1)
        self.intrs = np.concatenate(self.intrs,
                                    axis=0).astype(np.float32)  # (N, 3,3)

        if use_cache:
            full_info = {
                "sides": self.sides,
                "clr_paths": self.clr_paths,
                "mask_paths": self.mask_paths,
                "joints": self.joints,
                "kp2ds": self.kp2ds,
                "intrs": self.intrs,
                "centers": self.centers,
                "my_scales": self.my_scales,
            }
            with open(cache_path, "wb") as fid:
                pickle.dump(full_info, fid)
                print("Wrote cache for dataset rhd {} to {}".format(
                    self.data_split, cache_path))
        return
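A hedged usage sketch: the class statement is not part of this excerpt, so RHDDataset below is an assumed name, and the data_root path is a placeholder. The first run parses the pickled annotations and writes the cache; later runs load the cached arrays directly.

ds = RHDDataset(data_root="./RHD_published_v2",
                data_split="train",
                use_cache=True)
print(len(ds.clr_paths))  # number of kept samples (one hand per frame)
print(ds.joints.shape)    # (N, 21, 3) 3D joints in meters
print(ds.kp2ds.shape)     # (N, 21, 2) 2D keypoints in pixels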
Example #4
    def __init__(self,
                 data_root="/home/chen/datasets/CMU/hand_labels",
                 data_split='train',
                 hand_side='right',
                 njoints=21,
                 use_cache=True,
                 vis=False):

        if not os.path.exists(data_root):
            raise ValueError("data_root: %s not exist" % data_root)

        self.vis = vis
        self.name = 'CMU:hand_labels'
        self.data_split = data_split
        self.hand_side = hand_side
        self.clr_paths = []
        self.kp2ds = []
        self.centers = []
        self.sides = []
        self.my_scales = []
        self.njoints = njoints
        self.reslu = [1920, 1080]

        self.root_id = snap_joint_name2id['loc_bn_palm_L']  # 0
        self.mid_mcp_id = snap_joint_name2id['loc_bn_mid_L_01']  # 9

        # [train|test|val|train_val|all]
        if data_split == 'train':
            self.sequence = [
                'manual_train',
            ]
        elif data_split == 'test':
            self.sequence = [
                'manual_test',
            ]
        elif data_split == 'val':
            self.sequence = [
                'manual_test',
            ]
        elif data_split == 'train_val':
            self.sequence = [
                'manual_train',
            ]
        elif data_split == 'all':
            self.sequence = ['manual_train', 'manual_test']
        else:
            raise ValueError(
                "split {} not in [train|test|val|train_val|all]".format(
                    data_split))

        self.cache_folder = os.path.join(CACHE_HOME, "my-train", "hand_labels")
        os.makedirs(self.cache_folder, exist_ok=True)
        cache_path = os.path.join(self.cache_folder,
                                  "{}.pkl".format(self.data_split))

        if os.path.exists(cache_path) and use_cache:
            with open(cache_path, "rb") as fid:
                annotations = pickle.load(fid)
                self.sides = annotations["sides"]
                self.clr_paths = annotations["clr_paths"]
                self.kp2ds = annotations["kp2ds"]
                self.centers = annotations["centers"]
                self.my_scales = annotations["my_scales"]
            print("hand_labels {} gt loaded from {}".format(
                self.data_split, cache_path))
            return

        datapath_list = [os.path.join(data_root, seq) for seq in self.sequence]

        for datapath in datapath_list:
            files = sorted(
                [f for f in os.listdir(datapath) if f.endswith('.json')])

            for idx in tqdm(range(len(files))):
                f = files[idx]
                with open(os.path.join(datapath, f), 'r') as fid:
                    dat = json.load(fid)

                kp2d = np.array(dat['hand_pts'])[:, :2]
                is_left = dat['is_left']
                self.sides.append("left" if is_left else "right")

                clr_pth = os.path.join(datapath, f[0:-5] + '.jpg')
                self.clr_paths.append(clr_pth)
                center = handutils.get_annot_center(kp2d)
                my_scale = handutils.get_ori_crop_scale(mask=False,
                                                        mask_flag=False,
                                                        side=None,
                                                        kp2d=kp2d)

                kp2d = kp2d[np.newaxis, :, :]
                self.kp2ds.append(kp2d)

                center = center[np.newaxis, :]
                self.centers.append(center)

                my_scale = (np.atleast_1d(my_scale))[np.newaxis, :]
                self.my_scales.append(my_scale)

        # concatenate once, after all sequences have been read
        self.kp2ds = np.concatenate(self.kp2ds, axis=0).astype(
            np.float32)  # (N, 21, 2)
        self.centers = np.concatenate(self.centers,
                                      axis=0).astype(np.float32)  # (N, 2)
        self.my_scales = np.concatenate(self.my_scales, axis=0).astype(
            np.float32)  # (N, 1)

        if use_cache:
            full_info = {
                "sides": self.sides,
                "clr_paths": self.clr_paths,
                "kp2ds": self.kp2ds,
                "centers": self.centers,
                "my_scales": self.my_scales,
            }
            with open(cache_path, "wb") as fid:
                pickle.dump(full_info, fid)
                print("Wrote cache for dataset hand_labels {} to {}".format(
                    self.data_split, cache_path))
        return
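Each image in hand_labels is paired with a .json annotation of the same stem. Consistent with the loop above, the file carries hand_pts (21 keypoints, of which the loader keeps only the first two columns) and an is_left flag. A small reading sketch with a placeholder file name:

import json
import numpy as np

# Placeholder file name; real annotations live next to the .jpg images.
with open("manual_train/some_sample.json") as fid:
    dat = json.load(fid)

kp2d = np.array(dat["hand_pts"])[:, :2]       # (21, 2) pixel coordinates
side = "left" if dat["is_left"] else "right"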
Example #5
    def __init__(self,
                 data_root="/home/chen/datasets/CMU/hand143_panopticdb",
                 data_split='train',
                 hand_side='right',
                 njoints=21,
                 use_cache=True,
                 vis=False):

        if not os.path.exists(data_root):
            raise ValueError("data_root: %s not exist" % data_root)

        self.name = 'hand143_panopticdb'
        self.data_split = data_split
        self.hand_side = hand_side
        self.clr_paths = []
        self.kp2ds = []
        self.centers = []
        self.my_scales = []
        self.njoints = njoints
        self.reslu = [1920, 1080]
        self.vis = vis

        self.root_id = snap_joint_name2id['loc_bn_palm_L']  # 0
        self.mid_mcp_id = snap_joint_name2id['loc_bn_mid_L_01']  # 9

        # [train|test|val|train_val|all]
        if data_split == 'train':
            self.sequence = [
                'training',
            ]
        else:
            print("hand143_panopticdb only has a train set!")
            return

        self.cache_folder = os.path.join(CACHE_HOME, "my-train",
                                         "hand143_panopticdb")
        os.makedirs(self.cache_folder, exist_ok=True)
        cache_path = os.path.join(self.cache_folder,
                                  "{}.pkl".format(self.data_split))

        if os.path.exists(cache_path) and use_cache:
            with open(cache_path, "rb") as fid:
                annotations = pickle.load(fid)
                self.clr_paths = annotations["clr_paths"]
                self.kp2ds = annotations["kp2ds"]
                self.centers = annotations["centers"]
                self.my_scales = annotations["my_scales"]
            print("hand143_panopticdb {} gt loaded from {}".format(
                self.data_split, cache_path))
            return

        self.clr_root_list = [os.path.join(data_root, "imgs")]

        self.ann_list = [os.path.join(data_root, "hands_v143_14817.json")]

        for clr_root, ann in zip(self.clr_root_list, self.ann_list):

            with open(ann, 'r') as fid:
                dat_all = json.load(fid)
                dat_all = dat_all['root']

            for i in tqdm(range(len(dat_all))):
                clrpth = os.path.join(clr_root, '%.8d.jpg' % i)
                self.clr_paths.append(clrpth)

                dat = dat_all[i]
                kp2d = np.array(dat['joint_self'])[:, :2]  # 2D hand keypoints
                center = handutils.get_annot_center(kp2d)
                my_scale = handutils.get_ori_crop_scale(mask=None,
                                                        side=None,
                                                        mask_flag=False,
                                                        kp2d=kp2d)

                kp2d = kp2d[np.newaxis, :, :]
                self.kp2ds.append(kp2d)

                center = center[np.newaxis, :]
                self.centers.append(center)

                my_scale = (np.atleast_1d(my_scale))[np.newaxis, :]
                self.my_scales.append(my_scale)

        self.kp2ds = np.concatenate(self.kp2ds,
                                    axis=0).astype(np.float32)  # (N, 21, 2)
        self.centers = np.concatenate(self.centers,
                                      axis=0).astype(np.float32)  # (N, 2)
        self.my_scales = np.concatenate(self.my_scales,
                                        axis=0).astype(np.float32)  # (N, 1)

        if use_cache:
            full_info = {
                "clr_paths": self.clr_paths,
                "kp2ds": self.kp2ds,
                "centers": self.centers,
                "my_scales": self.my_scales,
            }
            with open(cache_path, "wb") as fid:
                pickle.dump(full_info, fid)
                print("Wrote cache for dataset hand143_panopticdb {} to {}".
                      format(self.data_split, cache_path))
        return
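These loaders all lean on handutils.get_annot_center to turn 2D keypoints into a crop center. Its implementation is not part of this excerpt; a plausible minimal stand-in, consistent with how it is called (a (21, 2) array in, a (2,) center out), is the bounding-box center of the keypoints:

import numpy as np

# Hypothetical stand-in for handutils.get_annot_center; the real helper may
# compute the center differently (e.g. as the mean of the keypoints).
def get_annot_center(kp2d):
    u_min, v_min = kp2d.min(axis=0)
    u_max, v_max = kp2d.max(axis=0)
    return np.array([(u_min + u_max) / 2.0,
                     (v_min + v_max) / 2.0], dtype=np.float32)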
Example #6

    def __init__(self,
                 data_root,
                 data_split='train',
                 hand_side='right',
                 njoints=21,
                 use_cache=True,
                 vis=False):
        if not os.path.exists(data_root):
            raise ValueError("data_root: %s not exist" % data_root)
        self.name = 'GANeratedHands Dataset'
        self.data_split = data_split
        self.hand_side = hand_side
        self.clr_paths = []
        self.kp2ds = []
        self.joints = []
        self.centers = []
        self.my_scales = []
        self.njoints = njoints
        self.reslu = [256, 256]

        self.vis = vis

        self.root_id = snap_joint_name2id['loc_bn_palm_L']  # 0
        self.mid_mcp_id = snap_joint_name2id['loc_bn_mid_L_01']  # 9

        self.intr = np.array([
            [617.173, 0, 315.453],
            [0, 617.173, 242.259],
            [0, 0, 1]])

        # [train|test|val|train_val|all]
        if data_split == 'train':
            self.sequence = ['training', ]
        else:
            print("GANeratedHands only has a train set!")
            return

        self.cache_folder = os.path.join(CACHE_HOME, "my-train", "GANeratedHands")
        os.makedirs(self.cache_folder, exist_ok=True)
        cache_path = os.path.join(
            self.cache_folder, "{}.pkl".format(self.data_split)
        )

        if os.path.exists(cache_path) and use_cache:
            with open(cache_path, "rb") as fid:
                annotations = pickle.load(fid)
                self.clr_paths = annotations["clr_paths"]
                self.kp2ds = annotations["kp2ds"]
                self.joints = annotations["joints"]
                self.centers = annotations["centers"]
                self.my_scales = annotations["my_scales"]
            print("GANeratedHands {} gt loaded from {}".format(self.data_split, cache_path))
            return

        print("init GANeratedHands {}, It will take a while at first time".format(data_split))

        for img_type in ['noObject', 'withObject']:
            folders = sorted(os.listdir(os.path.join(data_root, img_type)))
            # keep only the 4-character sequence folders
            folders = [os.path.join(img_type, x) for x in folders if len(x) == 4]

            for folder in folders:
                images = os.listdir(os.path.join(data_root, folder))
                images = sorted(
                    os.path.join(data_root, folder, x) for x in images
                    if x.endswith('.png'))

                self.clr_paths.extend(images)

        for idx in tqdm(range(len(self.clr_paths))):
            img_name = self.clr_paths[idx]

            fn_2d_keypoints = img_name.replace('color_composed.png', 'joint2D.txt')
            arr_2d_keypoints = np.loadtxt(fn_2d_keypoints, delimiter=',')
            arr_2d_keypoints = arr_2d_keypoints.reshape([-1, 2])

            center = handutils.get_annot_center(arr_2d_keypoints)
            self.centers.append(center[np.newaxis, :])

            my_scale = handutils.get_ori_crop_scale(
                mask=None, mask_flag=False, side=None, kp2d=arr_2d_keypoints)
            my_scale = (np.atleast_1d(my_scale))[np.newaxis, :]
            self.my_scales.append(my_scale)

            arr_2d_keypoints = arr_2d_keypoints[np.newaxis, :, :]
            self.kp2ds.append(arr_2d_keypoints)

            fn_3d_keypoints = img_name.replace('color_composed.png', 'joint_pos_global.txt')
            arr_3d_keypoints = np.loadtxt(fn_3d_keypoints, delimiter=',')
            arr_3d_keypoints = arr_3d_keypoints.reshape([-1, 3])
            arr_3d_keypoints = arr_3d_keypoints[np.newaxis, :, :]
            self.joints.append(arr_3d_keypoints)

        self.joints = np.concatenate(self.joints, axis=0).astype(np.float32)  # (N, 21, 3)
        self.kp2ds = np.concatenate(self.kp2ds, axis=0).astype(np.float32)  # (N, 21, 2)
        self.centers = np.concatenate(self.centers, axis=0).astype(np.float32)  # (N, 2)
        self.my_scales = np.concatenate(self.my_scales, axis=0).astype(np.float32)  # (N, 1)

        if use_cache:
            full_info = {
                "clr_paths": self.clr_paths,
                "joints": self.joints,
                "kp2ds": self.kp2ds,
                "centers": self.centers,
                "my_scales": self.my_scales,
            }
            with open(cache_path, "wb") as fid:
                pickle.dump(full_info, fid)
                print("Wrote cache for dataset GANeratedDataset {} to {}".format(
                    self.data_split, cache_path
                ))
        return
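GANerated ships camera-space 3D joints (joint_pos_global.txt) next to the 2D annotations, so the intrinsics stored above should relate the two by perspective projection. A hedged sanity check under that assumption; the file paths are placeholders:

import numpy as np

# Intrinsics as stored in the constructor above; file paths are placeholders.
intr = np.array([[617.173, 0, 315.453],
                 [0, 617.173, 242.259],
                 [0, 0, 1]])

joints3d = np.loadtxt("0001_joint_pos_global.txt",
                      delimiter=",").reshape(-1, 3)
kp2d = np.loadtxt("0001_joint2D.txt", delimiter=",").reshape(-1, 2)

proj = intr.dot(joints3d.T).T
proj = proj[:, :2] / proj[:, 2:3]  # perspective divide
print(np.abs(proj - kp2d).max())   # small residual if the assumption holds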