def load_dataset(self):
    """Load the .mat annotation file referenced by ``cfg["dataset"]``.

    Reads the MATLAB structure, keeps the raw data on ``self.raw_data``,
    and converts every sample into a ``DataItem`` holding the absolute
    image path, the image size and — when labels are present — the joint
    annotations. ``self.has_gt`` is set to False if any sample lacks
    joint labels.

    Returns:
        list[DataItem]: one item per image in the dataset.
    """
    cfg = self.cfg
    file_name = os.path.join(self.cfg["project_path"], cfg["dataset"])
    mlab = sio.loadmat(file_name)
    self.raw_data = mlab
    mlab = mlab["dataset"]

    num_images = mlab.shape[1]
    data = []
    has_gt = True

    for i in range(num_images):
        sample = mlab[0, i]

        item = DataItem()
        item.image_id = i
        base = str(self.cfg["project_path"])
        im_path = sample[0][0]
        if isinstance(im_path, str):
            im_path = robust_split_path(im_path)
        else:
            # MATLAB cell array of path components; strip stray padding.
            im_path = [s.strip() for s in im_path]
        item.im_path = os.path.join(base, *im_path)
        item.im_size = sample[1][0]
        if len(sample) >= 3:
            joints = sample[2][0][0]
            joint_id = joints[:, 0]
            # Make sure ALL joint ids are valid 0-indexed joints.
            # BUGFIX: was ``.any()``, which passed as long as a single id
            # was in range; ``.all()`` enforces the stated invariant.
            if joint_id.size != 0:
                assert (joint_id < cfg["num_joints"]).all()
            joints[:, 0] = joint_id
            # Coordinates without the leading joint-id column.
            coords = arr([joint[1:] for joint in joints])
            item.coords = coords
            item.joints = [joints]
            item.joint_id = [arr(joint_id)]
        else:
            has_gt = False  # this sample carries no labels
        data.append(item)
    self.has_gt = has_gt
    return data
def load_dataset(self):
    """Load the pickled multi-animal dataset referenced by ``cfg["dataset"]``.

    Unpickles the annotation file, keeps the raw data on ``self.raw_data``,
    and converts every sample dict into a ``DataItem`` with the image path,
    image size and the per-animal joint dictionary. ``self.has_gt`` becomes
    False if any sample has no labelled joints at all.

    Returns:
        list[DataItem]: one item per image in the dataset.
    """
    cfg = self.cfg
    file_name = os.path.join(self.cfg["project_path"], cfg["dataset"])
    # BUGFIX: ``file_name`` already includes the project path; joining the
    # project path a second time broke relative project paths (it only
    # "worked" before because an absolute second component makes
    # os.path.join discard the first).
    with open(file_name, "rb") as f:
        pickledata = pickle.load(f)

    self.raw_data = pickledata
    num_images = len(pickledata)
    data = []
    has_gt = True
    for i in range(num_images):
        sample = pickledata[i]

        item = DataItem()
        item.image_id = i
        im_path = sample["image"]
        if isinstance(im_path, str):
            im_path = robust_split_path(im_path)
        item.im_path = os.path.join(*im_path)
        item.im_size = sample["size"]
        if "joints" in sample:
            joints = sample["joints"]
            # Keep the annotations only if at least one animal has at
            # least one labelled joint (columns 1:3 hold x, y).
            all_xy = np.concatenate(
                [joints[person_id][:, 1:3] for person_id in joints]
            )
            if np.size(all_xy) > 0:
                item.joints = joints
            else:
                has_gt = False  # no animal has joints!
        else:
            has_gt = False
        data.append(item)
    self.has_gt = has_gt
    return data
def load_dataset(self):
    """Load annotations from either a legacy .mat file or a pickle.

    Dispatches on the dataset file extension: ``.mat`` files go through
    the legacy scipy loader, anything else is loaded from the matching
    ``.pickle`` file (float coordinates). In both cases the raw data is
    kept on ``self.raw_data`` and ``self.has_gt`` is set to False if any
    sample lacks joint labels.

    Returns:
        list[DataItem]: one item per image in the dataset.
    """
    cfg = self.cfg
    file_name = os.path.join(self.cfg["project_path"], cfg["dataset"])
    if ".mat" in file_name:  # legacy loader
        mlab = sio.loadmat(file_name)
        self.raw_data = mlab
        mlab = mlab["dataset"]

        num_images = mlab.shape[1]
        data = []
        has_gt = True

        for i in range(num_images):
            sample = mlab[0, i]

            item = DataItem()
            item.image_id = i
            im_path = sample[0][0]
            if isinstance(im_path, str):
                im_path = robust_split_path(im_path)
            else:
                # MATLAB cell array of path components.
                im_path = [s.strip() for s in im_path]
            item.im_path = os.path.join(*im_path)
            item.im_size = sample[1][0]
            if len(sample) >= 3:
                joints = sample[2][0][0]
                joint_id = joints[:, 0]
                # Make sure ALL joint ids are valid 0-indexed joints.
                # BUGFIX: was ``.any()``, which passed as long as a single
                # id was in range.
                if joint_id.size != 0:
                    assert (joint_id < cfg["num_joints"]).all()
                joints[:, 0] = joint_id
                item.joints = [joints]
            else:
                has_gt = False
            data.append(item)
        self.has_gt = has_gt
        return data
    else:
        print("Loading pickle data with float coordinates!")
        # BUGFIX: os.path.splitext instead of split(".") so dots inside
        # directory names do not truncate the path.
        file_name = os.path.splitext(cfg["dataset"])[0] + ".pickle"
        with open(os.path.join(self.cfg["project_path"], file_name), "rb") as f:
            pickledata = pickle.load(f)

        self.raw_data = pickledata
        num_images = len(pickledata)
        data = []
        has_gt = True
        for i in range(num_images):
            sample = pickledata[i]

            item = DataItem()
            item.image_id = i
            item.im_path = os.path.join(*sample["image"])
            item.im_size = sample["size"]
            # BUGFIX: ``len(sample) >= 3`` counted dict keys and only
            # worked by coincidence; test for the "joints" key directly.
            if "joints" in sample:
                item.num_animals = len(sample["joints"])
                item.joints = [sample["joints"]]
            else:
                has_gt = False
            data.append(item)
        self.has_gt = has_gt
        return data