def nuscenes_data_prep(root_path,
                       version,
                       dataset_name='NuScenesDataset',
                       max_sweeps=10,
                       use_flat_vehicle_coords=False,
                       use_second_format_direction=False):
    """Create nuScenes info files and the ground-truth sample database.

    Args:
        root_path: root directory of the nuScenes dataset.
        version: nuScenes version string (e.g. "v1.0-trainval", "v1.0-test").
        dataset_name: dataset class name passed to the GT database builder.
        max_sweeps: number of lidar sweeps aggregated per sample.
        use_flat_vehicle_coords: legacy flag; no longer supported.
        use_second_format_direction: legacy flag; no longer supported.

    Raises:
        NotImplementedError: if either legacy flag is set (``assert`` would be
            silently stripped under ``python -O``, so raise explicitly).
    """
    if use_flat_vehicle_coords:
        raise NotImplementedError(
            'use_flat_vehicle_coords is not supported anymore.')
    if use_second_format_direction:
        raise NotImplementedError(
            'use_second_format_direction is not supported anymore.')
    # 'splits' in the template is later replaced by 'train', 'val' or 'test'.
    postfix = 'zero_slope' if use_flat_vehicle_coords else 'slope'
    # Build the name directly instead of the fragile single-character
    # replace('N', ...) substitution used previously.
    filename_template = f'infos_splits_{postfix}_sweeps{max_sweeps}.pkl'
    nu_ds.create_nuscenes_infos(
        root_path,
        version=version,
        filename_template=filename_template,
        max_sweeps=max_sweeps,
        use_flat_vehicle_coords=use_flat_vehicle_coords,
        use_second_format_direction=use_second_format_direction)
    split = 'test' if 'test' in version else 'train'
    name = filename_template.replace('splits', split)
    create_groundtruth_database(
        dataset_class_name=dataset_name,
        data_path=root_path,
        info_path=Path(root_path) / name,
        db_dirname=f'gt_database_sweeps{max_sweeps}',
    )
def nuscenes_data_prep(root_path, version, dataset_name, max_sweeps=10):
    """Generate nuScenes info files, then build the ground-truth database.

    The GT database is built from the train infos, except for the test
    version where only test infos exist.
    """
    nu_ds.create_nuscenes_infos(root_path, version=version, max_sweeps=max_sweeps)
    info_name = "infos_test.pkl" if version == "v1.0-test" else "infos_train.pkl"
    create_groundtruth_database(dataset_name, root_path, Path(root_path) / info_name)
def nuscenes_data_prep(root_path, version, dataset_name, max_sweeps=10):
    """Build the nuScenes ground-truth database from pre-existing info files.

    Args:
        root_path: dataset root; if falsy, falls back to the original
            hard-coded location (previously the argument was always ignored,
            shadowing the parameter).
        version: nuScenes version string; "v1.0-test" selects the test infos.
        dataset_name: dataset class name for the GT database builder.
        max_sweeps: unused here; kept for signature compatibility.
    """
    # Honor the caller's path; the hard-coded value is only a fallback.
    root_path = root_path or '/mnt/sdd/jhyoo/dataset/NUSCENES'
    home_path = Path('/home/spalab/jskim_2/second.pytorch/second/dataset')
    # NOTE(review): info-file creation is deliberately skipped here — the
    # .pkl files under home_path are assumed to already exist.
    name = "infos_train.pkl"
    if version == "v1.0-test":
        name = "infos_test.pkl"
    database_save_path = home_path / 'gt_database'
    db_info_save_path = home_path / "kitti_dbinfos_train.pkl"
    create_groundtruth_database(
        dataset_class_name=dataset_name,
        data_path=root_path,
        info_path=home_path / name,
        database_save_path=database_save_path,
        db_info_save_path=db_info_save_path)
def nuscenes_data_prep(root_path, version, max_sweeps=10):
    """Create nuScenes info files; build the GT database only when sweeps are off.

    Args:
        root_path: root directory of the nuScenes dataset.
        version: nuScenes version string.
        max_sweeps: number of aggregated sweeps; any value > 0 disables the
            ground-truth database (sweeps are not supported by it).
    """
    nu_ds.create_nuscenes_infos(root_path, version=version, max_sweeps=max_sweeps)
    # The previous code computed a version-dependent info name but never used
    # it (dead code, removed); the database is always built from train infos.
    if max_sweeps == 0:
        create_groundtruth_database("NuScenesDataset", root_path,
                                    Path(root_path) / "infos_train.pkl")
    else:
        print(
            "WARNING: ground truth database will be disabled because sweeps don't support this."
        )
def nuscenes_data_prep(root_path, version, dataset_name, max_sweeps=10):
    """Create radar-modality nuScenes infos and the matching GT database.

    Args:
        root_path: dataset root; if falsy, falls back to the original
            hard-coded location (previously the argument was always ignored,
            shadowing the parameter).
        version: nuScenes version string; "v1.0-test" selects the test infos.
        dataset_name: dataset class name for the GT database builder.
        max_sweeps: number of radar sweeps aggregated per sample.
    """
    # Honor the caller's path; the hard-coded value is only a fallback.
    root_path = root_path or '/mnt/sdd/jhyoo/dataset/NUSCENES'
    home_path = Path('/home/spalab/jskim_2/second.pytorch/second/dataset')
    modality = 'radar'
    radar_version = 'version2'
    nu_rad_ds.create_nuscenes_infos(
        root_path,
        version=version,
        modality=modality,
        radar_version=radar_version,
        max_sweeps=max_sweeps)
    name = f"infos_train_multisweep_{radar_version}.pkl"
    if version == "v1.0-test":
        name = "infos_test.pkl"
    database_save_path = home_path / f'gt_database_multisweep_{radar_version}'
    db_info_save_path = home_path / f"kitti_dbinfos_multisweep_{radar_version}_train.pkl"
    create_groundtruth_database(
        dataset_class_name=dataset_name,
        data_path=root_path,
        info_path=Path(root_path) / name,
        database_save_path=database_save_path,
        db_info_save_path=db_info_save_path)
def kitti_gt_fgm_data_prep(old_root_path, old_trainval_info_path,
                           new_root_path, new_train_info_path):
    """Augment KITTI reduced point clouds with a foreground-mask channel.

    For every sample, appends one extra column to the lidar points marking
    whether each point lies inside any ground-truth box, writes the result to
    ``new_root_path/training/velodyne_reduced``, then builds the GT database
    for the foreground-mask dataset.

    Args:
        old_root_path: root of the source KITTI dataset.
        old_trainval_info_path: info .pkl for the source trainval split.
        new_root_path: destination root receiving the augmented .bin files.
        new_train_info_path: info .pkl used for the new GT database.
    """
    from second.core import box_np_ops
    from second.data.dataset import get_dataset_class
    dataset = get_dataset_class('KittiDataset')(
        root_path=old_root_path, info_path=old_trainval_info_path)
    for i in trange(len(dataset)):
        image_idx = i
        sensor_data = dataset.get_sensor_data(i)
        if 'image_idx' in sensor_data['metadata']:
            image_idx = sensor_data['metadata']['image_idx']
        points = sensor_data['lidar']['points']
        annos = sensor_data['lidar']['annotations']
        gt_boxes = annos['boxes']
        # Boolean mask (num_points, num_boxes); max over boxes collapses it to
        # "inside any gt box" per point.
        gt_mask = box_np_ops.points_in_rbbox(points, gt_boxes)
        points_aug = np.concatenate(
            (points, gt_mask.max(axis=1, keepdims=True)), axis=1)
        points_aug = points_aug.astype(np.float32)
        velo_file = 'training/velodyne_reduced/%06d.bin' % (image_idx)
        # BUG FIX: ndarray.tofile writes raw bytes, so the file must be opened
        # in binary mode; the previous text-mode 'w' fails on a Python 3 text
        # handle.
        with open(f'{new_root_path}/{velo_file}', 'wb') as f:
            points_aug.tofile(f)
    create_groundtruth_database(dataset_class_name='KittiFGMDataset',
                                data_path=new_root_path,
                                info_path=new_train_info_path)
def kitti_data_prep(root_path):
    """Run the full KITTI prep: info files, reduced clouds, GT database."""
    kitti_ds.create_kitti_info_file(root_path)
    kitti_ds.create_reduced_point_cloud(root_path)
    train_info = Path(root_path) / "kitti_infos_train.pkl"
    create_groundtruth_database("KittiDataset", root_path, train_info)
def nuscenes_data_prep(root_path, version):
    """Create nuScenes info files and the ground-truth sample database.

    Args:
        root_path: root directory of the nuScenes dataset.
        version: nuScenes version string; "v1.0-test" selects the test infos.
    """
    nu_ds.create_nuscenes_infos(root_path, version=version)
    # The test split produces no infos_train.pkl, so pick the info file that
    # matches the requested version instead of unconditionally using train.
    name = "infos_test.pkl" if version == "v1.0-test" else "infos_train.pkl"
    create_groundtruth_database("NuScenesDataset", root_path,
                                Path(root_path) / name)
def appolo_data_prep(root_path="/scratch2/wdong/appolo/data"):
    """Create Apollo info files and the ground-truth sample database.

    Args:
        root_path: root directory of the Apollo dataset (KITTI-style layout).
    """
    appolo_ds.create_kitti_info_file(root_path)
    create_groundtruth_database("AppoloDataset", root_path,
                                Path(root_path) / "kitti_infos_train.pkl")
    # Removed a dead trailing `pass` statement; it had no effect.
def udi_data_prep(root_path, dataset_name):
    """Create UDI info files, then build the ground-truth sample database."""
    udi_ds.create_udi_infos(root_path)
    info_path = Path(root_path) / "infos_udi_train.pkl"
    create_groundtruth_database(dataset_name, root_path, info_path)
def lyft_data_prep(root_path, json_path, dataset_name, max_sweeps=10):
    """Create Lyft info files, then build the ground-truth sample database."""
    lyft_ds.create_lyft_infos(root_path, json_path, max_sweeps=max_sweeps)
    info_path = Path(root_path) / "infos_train.pkl"
    create_groundtruth_database(dataset_name, root_path, info_path)