# Imports used by the functions below (gathered from their usage)
import os
import re

from bs4 import BeautifulSoup

import s2p
import s2plib.common
from s2plib import common


def produce_lidarviewer(s2poutdir, output):
    """
    Produce a single multiscale point cloud for the whole processed region.

    Args:
        s2poutdir: s2p output directory, containing the tiles.txt file
        output: prefix for the output LidarViewer files
    """
    tiles_file = os.path.join(s2poutdir, 'tiles.txt')

    # Read the tiles file
    tiles = s2p.read_tiles(tiles_file)
    print(str(len(tiles)) + ' tiles found')

    # collect all plys
    plys = [os.path.join(os.path.abspath(os.path.dirname(t)), 'cloud.ply')
            for t in tiles]

    nthreads = 4
    plys = ' '.join(plys)
    common.run("LidarPreprocessor -to %s.LidarO -tp %s.LidarP -nt %d %s -o %s"
               % (output, output, nthreads, plys, output))
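
# Usage sketch for produce_lidarviewer (hypothetical paths; assumes the
# LidarPreprocessor binary is on PATH and that an s2p run has already
# produced per-tile cloud.ply files):
#
#     produce_lidarviewer('s2p_output', 's2p_output/lidarviewer/cloud')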
def main(tiles_file, outfile, sub_img):
    outfile_basename = os.path.basename(outfile)
    outfile_dirname = os.path.dirname(outfile)

    output_format = outfile_basename[-3:]
    print('Output format is ' + output_format)

    # If output format is tif, we need to generate a temporary vrt
    # with the same name
    vrt_basename = outfile_basename
    if output_format == 'tif':
        vrt_basename = vrt_basename[:-3] + 'vrt'
    elif output_format != 'vrt':
        print('Error: only vrt or tif extension is allowed for output image.')
        return

    vrt_name = os.path.join(outfile_dirname, vrt_basename)

    # Read the tiles file
    tiles = s2p.read_tiles(tiles_file)
    print(str(len(tiles)) + ' tiles found')

    # Compute the global extent of the output image
    (min_x, max_x, min_y, max_y) = global_extent(tiles)
    print('Global extent: [%i,%i]x[%i,%i]' % (min_x, max_x, min_y, max_y))

    # Now, write all row vrts
    print("Writing row vrt files " + vrt_basename)
    vrt_row = write_row_vrts(tiles, sub_img, vrt_basename, min_x, max_x)

    # Finally, write main vrt
    print('Writing ' + vrt_name)
    write_main_vrt(vrt_row, vrt_name, min_x, max_x, min_y, max_y)

    # If output format is tif, convert vrt file to tif
    if output_format == 'tif':
        print('Converting vrt to tif ...')
        common.run(('gdal_translate -ot Float32 -co TILED=YES -co'
                    ' BIGTIFF=IF_NEEDED %s %s' % (common.shellquote(vrt_name),
                                                  common.shellquote(outfile))))

        print('Removing temporary vrt files')
        # Do not use items()/iteritems() here because of python 2 and 3 compat
        for y in vrt_row:
            vrt_data = vrt_row[y]
            row_vrt_filename = os.path.join(vrt_data['vrt_dir'], vrt_basename)
            try:
                os.remove(row_vrt_filename)
            except OSError:
                pass
        try:
            os.remove(vrt_name)
        except OSError:
            pass
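
# A minimal sketch of what global_extent() is assumed to compute: the
# pixel bounding box covering every tile. The tile-config layout used
# here (a json file holding an 'roi' dict with x/y/w/h keys) is an
# assumption for illustration, not s2p's confirmed format.
def global_extent_sketch(tiles):
    import json
    min_x = min_y = float('inf')
    max_x = max_y = float('-inf')
    for t in tiles:
        with open(t) as f:
            roi = json.load(f)['roi']  # hypothetical key layout
        min_x = min(min_x, roi['x'])
        min_y = min(min_y, roi['y'])
        max_x = max(max_x, roi['x'] + roi['w'])
        max_y = max(max_y, roi['y'] + roi['h'])
    return (min_x, max_x, min_y, max_y)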
def end2end_cluster(config):
    print('Configuration file: ', config)
    print('Running end2end in sequential mode to get reference DSM ...')
    test_cfg = s2p.read_config_file(config)
    test_cfg['skip_existing'] = True
    s2p.main(test_cfg)

    outdir = test_cfg['out_dir']
    expected = s2plib.common.gdal_read_as_array_with_nans(os.path.join(outdir, 'dsm.tif'))

    print('Running end2end in cluster mode ...')
    test_cfg_cluster = dict()
    test_cfg_cluster.update(test_cfg)
    test_cfg_cluster['out_dir'] = test_cfg_cluster['out_dir'] + "_cluster"
    test_cfg_cluster['skip_existing'] = True

    print("Running initialisation step ...")
    s2p.main(test_cfg_cluster, ["initialisation"])

    # Retrieve tiles list
    outdir = test_cfg_cluster['out_dir']
    tiles_file = os.path.join(outdir, 'tiles.txt')
    tiles = s2p.read_tiles(tiles_file)
    print('Found ' + str(len(tiles)) + ' tiles to process')

    for step in s2p.ALL_STEPS:
        if s2p.ALL_STEPS[step] is True:
            # Distributed step: run it on each tile separately
            print('Running %s on each tile...' % step)
            for tile in tiles:
                print('tile : %s' % tile)
                tile_cfg_cluster = s2p.read_config_file(tile)
                s2p.main(tile_cfg_cluster, [step])
        else:
            # Global step: run it once on the whole region
            print('Running %s...' % step)
            print('test_cfg_cluster : %s' % test_cfg_cluster)
            s2p.main(test_cfg_cluster, [step])

    computed = s2plib.common.gdal_read_as_array_with_nans(os.path.join(outdir, 'dsm.tif'))
    end2end_compare_dsm(computed, expected, 0, 0)
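
# Usage sketch (hypothetical config path; requires a full s2p test
# dataset on disk):
#
#     end2end_cluster('tests/data/input_pair/config.json')
#
# The sequential run produces the reference dsm.tif; the cluster-style
# run re-executes each distributed step tile by tile, and the two DSMs
# are then compared (the trailing 0, 0 arguments are presumably zero
# tolerances on the comparison metrics).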
def produce_potree(s2poutdir, potreeoutdir):
    """
    Produce a single multiscale point cloud for the whole processed region.

    Args:
        s2poutdir: s2p output directory, containing the tiles.txt file
        potreeoutdir: output directory for the Potree point cloud
    """
    basedir = os.path.dirname(os.path.abspath(__file__))
    test_for_potree(os.path.join(basedir, 'PotreeConverter_PLY_toolchain/'))

    tiles_file = os.path.join(s2poutdir, 'tiles.txt')

    # Read the tiles file
    tiles = s2p.read_tiles(tiles_file)
    print(str(len(tiles)) + ' tiles found')

    def plyvertex(fname):
        # Parse the PLY header and return the declared vertex count
        with open(fname, 'r', encoding='utf-8') as f:
            for x in f:
                if x.split()[0] == 'element' and x.split()[1] == 'vertex':
                    return int(x.split()[2])

    # collect all non-empty plys
    plys = []
    for t in tiles:
        clo = os.path.join(os.path.abspath(os.path.dirname(t)), 'cloud.ply')
        if os.path.isfile(clo) and plyvertex(clo) > 0:
            plys.append(clo)

    # produce the potree point cloud
    plys_to_potree(plys,
                   os.path.join(potreeoutdir, 'cloud.potree'),
                   os.path.join(basedir, 'PotreeConverter_PLY_toolchain/'))
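
# The PLY header that plyvertex() parses looks like the string below:
# an ASCII preamble whose "element vertex N" line declares the point
# count. This self-contained demo (a hypothetical helper mirroring
# plyvertex's logic) writes a two-point cloud and reads the count back.
def _plyvertex_demo(path='demo_cloud.ply'):
    with open(path, 'w', encoding='utf-8') as f:
        f.write('ply\n'
                'format ascii 1.0\n'
                'element vertex 2\n'
                'property float x\n'
                'property float y\n'
                'property float z\n'
                'end_header\n'
                '0 0 0\n'
                '1 1 1\n')
    with open(path, 'r', encoding='utf-8') as f:
        for line in f:
            if line.split()[:2] == ['element', 'vertex']:
                return int(line.split()[2])  # returns 2 here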
def produce_potree(s2p_outdirs_list, potreeoutdir):
    """
    Produce a single multiscale point cloud for the whole processed region.

    Args:
        s2p_outdirs_list: list of s2p output directories
        potreeoutdir: output directory for the Potree point cloud
    """
    basedir = os.path.dirname(os.path.abspath(__file__))
    test_for_potree(os.path.join(basedir, 'PotreeConverter_PLY_toolchain/'))

    def plyvertex(fname):
        # Parse the PLY header and return the declared vertex count
        with open(fname, 'r', encoding='utf-8') as f:
            for x in f:
                if x.split()[0] == 'element' and x.split()[1] == 'vertex':
                    return int(x.split()[2])

    js_scripts = []
    regex = re.compile(r"Potree\.loadPointCloud\(.*\);", re.DOTALL)
    cloudoutdir = os.path.join(potreeoutdir, "cloud.potree")

    # Produce a "cloud_?.html" file for all given s2p outdirs
    for i, s2p_outdir in enumerate(s2p_outdirs_list):
        tiles = s2p.read_tiles(os.path.join(s2p_outdir, 'tiles.txt'))
        print(str(len(tiles)) + ' tiles found')

        # collect all non-empty plys
        plys = []
        for t in tiles:
            clo = os.path.join(os.path.abspath(os.path.dirname(t)), 'cloud.ply')
            if os.path.isfile(clo) and plyvertex(clo) > 0:
                plys.append(clo)

        # produce the potree point cloud
        cloud_name = "cloud_{}".format(i)
        plys_to_potree(plys,
                       cloudoutdir,
                       os.path.join(basedir, 'PotreeConverter_PLY_toolchain/'),
                       cloud_name)

        # Gather the js script inside the HTML file that is relevant
        # to the point cloud
        cloud_html = os.path.join(cloudoutdir, "{}.html".format(cloud_name))
        with open(cloud_html) as f:
            soup = BeautifulSoup(f, features="lxml")
        script = soup.find_all("script")[-1]
        js_script = re.search(regex, script.text).group(0)
        js_scripts.append(js_script)
        os.remove(cloud_html)

    # The "main.html" file will contain a concatenation of all the js
    # scripts that were gathered in the loop above.
    # Use the last HTML file as a basis for the "main.html", and replace
    # its js script by all the js scripts
    main_html = os.path.join(cloudoutdir, "main.html")
    script.string = re.sub(regex, "\n".join(js_scripts), script.text)
    with open(main_html, "w") as f:
        f.write(soup.prettify())
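
# Usage sketch (hypothetical paths): merge the clouds of two s2p runs
# into one Potree viewer. Each run contributes a cloud_<i> point cloud,
# and the generated main.html loads all of them together:
#
#     produce_potree(['run_a/s2p_out', 'run_b/s2p_out'], 'potree_out')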