def main():
    """Command-line entry point: read a TXT point cloud and write it out as PLY."""

    arg_parser = argparse.ArgumentParser(description='Convert TXT to PLY.')
    arg_parser.add_argument('input', type=str, help='TXT file.')
    arg_parser.add_argument('output', type=str, help='PLY file.')
    parsed = arg_parser.parse_args()

    # Bail out early when the source file is missing.
    if not os.path.exists(parsed.input):
        print('Input file does not exist.')
        exit(1)

    cloud = PointCloud.from_txt(parsed.input)
    print('Read %s.' % parsed.input)

    cloud.to_ply(parsed.output)
    print('Wrote %s.' % parsed.output)
print('[Data] read ' + common.filename(config, 'part_space_file', '_f.h5', dataset)) #statistics = statistics.reshape(1, 1, statistics.shape[0], statistics.shape[1], statistics.shape[2]) #statistics = np.repeat(statistics, space.shape[0], axis=0) #print(space.shape, statistics.shape) #invalid_space = space*statistics points = [] point_dir = common.filename(config, 'bounding_box_txt_directory', '', dataset) + '/' for file in os.listdir(point_dir): point_file = point_dir + file point_cloud = PointCloud.from_txt(point_file) #print('[Data] read ' + point_file) points.append(point_cloud.points.shape[0]) frames = [1] * inputs.shape[0] frame_dir = common.filename(config, 'velodyne_individual_gt_txt_directory', '', dataset) + '/' for i in range(inputs.shape[0]): for k in range(-config['gt_range'], config['gt_range'] + 1, config['gt_skip']): if k == 0: continue txt_file = frame_dir + '%d_%d_%d.txt' % (i, k, frames[i])
from point_cloud import PointCloud

if __name__ == '__main__':
    # Expect exactly one argument: the folder holding the JSON configs.
    if len(sys.argv) < 2:
        print('[Data] Usage python 13_ply_observations.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(config_folder), 'directory %s does not exist' % config_folder

    for config_file in os.listdir(config_folder):
        # Skip prior configs (any file name with 'prior' past position 0).
        if config_file.find('prior') > 0:
            continue

        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)

        # Convert one TXT directory to one PLY directory per observation index.
        for observation in range(config['n_observations']):
            txt_directory = common.dirname(config, 'txt_gt_dir') + str(observation) + '/'
            assert os.path.exists(txt_directory)

            ply_directory = common.dirname(config, 'ply_gt_dir') + str(observation) + '/'
            if not os.path.exists(ply_directory):
                os.makedirs(ply_directory)

            for filename in os.listdir(txt_directory):
                ply_file = ply_directory + filename[:-4] + '.ply'
                cloud = PointCloud.from_txt(txt_directory + filename)
                cloud.to_ply(ply_file)
                print('[Data] wrote ' + ply_file)
# NOTE(review): this chunk begins mid-expression -- the line below is the tail
# of a reshape/print call started outside this excerpt; 'inputs', 'targets',
# 'n_observations', 'config', 'common', 'utils' come from outside as well.
                             inputs.shape[3], inputs.shape[4]))
print('[Data] read ' + common.filename(config, 'input_file'))

# Flatten the observation axis into the batch axis:
# (batch, n_observations, ...) -> (batch * n_observations, 1, d, h, w).
space = utils.read_hdf5(common.filename(config, 'space_file'))
space = space.reshape((space.shape[0] * n_observations, 1, space.shape[2], space.shape[3], space.shape[4]))
print('[Data] read ' + common.filename(config, 'space_file'))

# Number of points of every ground-truth point cloud, gathered across all
# per-observation subdirectories.
points = []
point_dir = common.dirname(config, 'txt_gt_dir') + '/'
for k in range(n_observations):
    k_point_dir = point_dir + '/%d/' % k
    for txt_file in os.listdir(k_point_dir):
        point_cloud = PointCloud.from_txt(k_point_dir + txt_file)
        points.append(point_cloud.points.shape[0])
points = np.array(points)

# Per-sample voxel counts: sum over the three spatial axes (4, 3, 2).
occupied = np.squeeze(np.sum(np.sum(np.sum(targets, axis=4), axis=3), axis=2))
observed_points = np.squeeze(np.sum(np.sum(np.sum(inputs, axis=4), axis=3), axis=2))
observed_space = np.squeeze(np.sum(np.sum(np.sum(space, axis=4), axis=3), axis=2))

# A voxel counts as observed if it is occupied in the inputs OR free space.
mask = np.zeros(inputs.shape)
mask[inputs == 1] = 1
mask[space == 1] = 1
observed_total = float(np.sum(mask))
# NOTE(review): this chunk starts inside a usage-check branch whose 'if'
# header lies outside this excerpt; 'sys', 'os', 'ntpath', 'utils', 'common',
# 'plt' and 'PointCloud' are imported/defined outside as well.
print('[Data] Usage python create_cuboids.py config_folder')
exit(1)

config_file = sys.argv[1]
assert os.path.exists(config_file)

config = utils.read_json(config_file)
# Dataset name = config file name without its extension (last 5 chars, '.json').
dataset = ntpath.basename(config_file)[:-5]

orig_dir = common.filename(config, 'bounding_box_txt_directory', '', dataset)
gt_dir = common.filename(config, 'velodyne_gt_txt_directory', '', dataset)
gt_files = utils.read_ordered_directory(gt_dir)

# Plot each ground-truth point cloud against its bounding-box original;
# files are matched by base name.
for i in range(len(gt_files)):
    orig_file = orig_dir + '/' + ntpath.basename(gt_files[i])
    orig_point_cloud = PointCloud.from_txt(orig_file)
    gt_point_cloud = PointCloud.from_txt(gt_files[i])
    print('[Data] read ' + orig_file)
    print('[Data] read ' + gt_files[i])

    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')

    # Ground truth in blue '+' markers.
    xx = gt_point_cloud.points[:, 0]
    yy = gt_point_cloud.points[:, 1]
    zz = gt_point_cloud.points[:, 2]
    ax.scatter(xx, yy, zz, c='b', marker='+', s=25)

    xx = orig_point_cloud.points[:, 0]
    yy = orig_point_cloud.points[:, 1]
    # (chunk truncated here -- the loop body continues outside this excerpt)
i_ltsdf_inputs[k] = np.min(ltsdf_inputs[perm], axis=0) # Also fuse the actual point clouds! txt_directories = utils.read_ordered_directory( common.dirname(config, 'txt_gt_dir')) txt_directory = common.dirname(config, 'txt_gt_dir', i) + '%d/' % k utils.makedir(txt_directory) for n in range(N): point_cloud = PointCloud() print('[Data] +') for j in range(perm.shape[0]): txt_file = txt_directories[perm[j]] + '/%d.txt' % n print('[Data] | read ' + txt_file) point_cloud_j = PointCloud.from_txt(txt_file) point_cloud.points = np.concatenate( (point_cloud.points, point_cloud_j.points), axis=0) txt_file = txt_directory + '%d.txt' % n point_cloud.to_txt(txt_file) print('[Data] wrote ' + txt_file) i_inputs = np.swapaxes(i_inputs, 0, 1) i_space = np.swapaxes(i_space, 0, 1) i_sdf_inputs = np.swapaxes(i_sdf_inputs, 0, 1) i_tsdf_inputs = np.swapaxes(i_tsdf_inputs, 0, 1) i_ltsdf_inputs = np.swapaxes(i_ltsdf_inputs, 0, 1) print(i_inputs.shape) print(i_space.shape)