def load_data_test(self, config_dict):
    """Build a DataLoader over the test split described by *config_dict*.

    Args:
        config_dict: configuration mapping; reads 'dataset_folder_test',
            'img_type', 'input_types', 'label_types_test', 'useCamBatches',
            'batch_size_test' and 'num_workers'.

    Returns:
        torch.utils.data.DataLoader yielding batches from the test dataset,
        driven by a CollectedDatasetSampler.
    """
    dataset = collected_dataset.CollectedDataset(
        data_folder=config_dict['dataset_folder_test'],
        img_type=config_dict['img_type'],
        input_types=config_dict['input_types'],
        label_types=config_dict['label_types_test'])
    # Subject batching disabled; camera batching and batch size come from config.
    batch_sampler = collected_dataset.CollectedDatasetSampler(
        data_folder=config_dict['dataset_folder_test'],
        useSubjectBatches=0,
        useCamBatches=config_dict['useCamBatches'],
        batch_size=config_dict['batch_size_test'],
        randomize=True,
        # NOTE(review): hard-coded frame subsampling; the original code carried
        # config_dict['every_nth_frame'] as a commented-out alternative —
        # confirm whether the config value should be used instead.
        every_nth_frame=100)
    loader = torch.utils.data.DataLoader(
        dataset,
        batch_sampler=batch_sampler,
        num_workers=config_dict['num_workers'],
        pin_memory=False,
        # Custom collate: handles string-valued entries the default collate rejects.
        collate_fn=utils_data.default_collate_with_string)
    return loader
def load_data_test(self, config_dict):
    """Create the test-set DataLoader configured by *config_dict*.

    Args:
        config_dict: configuration mapping; reads 'dataset_folder_test',
            'input_types', 'label_types_test', 'useCamBatches',
            'batch_size_test' and 'every_nth_frame'.

    Returns:
        torch.utils.data.DataLoader over the test dataset, batched by a
        CollectedDatasetSampler.
    """
    # Dataset pulls inputs and labels from the configured test folder.
    test_dataset = collected_dataset.CollectedDataset(
        data_folder=config_dict['dataset_folder_test'],
        input_types=config_dict['input_types'],
        label_types=config_dict['label_types_test'])
    # Sampler: subject batching off, camera batching per config, shuffled order,
    # frames subsampled every config_dict['every_nth_frame'].
    sampler = collected_dataset.CollectedDatasetSampler(
        data_folder=config_dict['dataset_folder_test'],
        useSubjectBatches=0,
        useCamBatches=config_dict['useCamBatches'],
        batch_size=config_dict['batch_size_test'],
        randomize=True,
        every_nth_frame=config_dict['every_nth_frame'])
    # Single-process loading; custom collate_fn tolerates string fields.
    return torch.utils.data.DataLoader(
        test_dataset,
        batch_sampler=sampler,
        num_workers=0,
        pin_memory=False,
        collate_fn=utils_data.default_collate_with_string)