def process(self, args, outputs):
    """Merge stage for split/large datasets.

    When ``outputs['large']`` is truthy, merges the per-submodel artifacts
    found under ``tree.submodels_path`` (point clouds, crop bounds,
    orthophotos, DEMs and report shots) into the top-level project tree,
    then stops the pipeline by setting ``self.next_stage = None``.
    For normal (non-split) datasets this stage is a no-op.

    :param args: parsed pipeline arguments (reads ``merge``, ``crop``,
        ``dsm``, ``dtm``, ``tiles``, ``optimize_disk_space``,
        ``max_concurrency``).
    :param outputs: pipeline state dict; reads ``'tree'`` and ``'large'``.
    """
    tree = outputs['tree']

    if outputs['large']:
        # The split stage should have produced the submodels folder;
        # without it there is nothing to merge.
        if not os.path.exists(tree.submodels_path):
            log.ODM_ERROR("We reached the merge stage, but %s folder does not exist. Something must have gone wrong at an earlier stage. Check the log and fix possible problem before restarting?" % tree.submodels_path)
            # NOTE(review): bare exit() — sys.exit(1) would be the
            # conventional call here; confirm project style before changing.
            exit(1)

        # Merge point clouds
        if args.merge in ['all', 'pointcloud']:
            if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
                all_point_clouds = get_submodel_paths(tree.submodels_path, "odm_georeferencing", "odm_georeferenced_model.laz")

                try:
                    point_cloud.merge(all_point_clouds, tree.odm_georeferencing_model_laz)
                    point_cloud.post_point_cloud_steps(args, tree)
                except Exception as e:
                    # Best-effort: a failed point-cloud merge does not abort
                    # the rest of the merge stage.
                    log.ODM_WARNING("Could not merge point cloud: %s (skipping)" % str(e))
            else:
                log.ODM_WARNING("Found merged point cloud in %s" % tree.odm_georeferencing_model_laz)

        self.update_progress(25)

        # Merge crop bounds
        merged_bounds_file = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
        if not io.file_exists(merged_bounds_file) or self.rerun():
            all_bounds = get_submodel_paths(tree.submodels_path, 'odm_georeferencing', 'odm_georeferenced_model.bounds.gpkg')
            log.ODM_INFO("Merging all crop bounds: %s" % all_bounds)
            if len(all_bounds) > 0:
                # Calculate a new crop area
                # based on the convex hull of all crop areas of all submodels
                # (without a buffer, otherwise we are double-cropping)
                Cropper.merge_bounds(all_bounds, merged_bounds_file, 0)
            else:
                log.ODM_WARNING("No bounds found for any submodel.")

        # Merge orthophotos
        if args.merge in ['all', 'orthophoto']:
            if not io.dir_exists(tree.odm_orthophoto):
                system.mkdir_p(tree.odm_orthophoto)

            if not io.file_exists(tree.odm_orthophoto_tif) or self.rerun():
                # Pairs of (feathered orthophoto, cut orthophoto) per submodel.
                all_orthos_and_ortho_cuts = get_all_submodel_paths(tree.submodels_path,
                    os.path.join("odm_orthophoto", "odm_orthophoto_feathered.tif"),
                    os.path.join("odm_orthophoto", "odm_orthophoto_cut.tif"),
                )

                if len(all_orthos_and_ortho_cuts) > 1:
                    log.ODM_INFO("Found %s submodels with valid orthophotos and cutlines" % len(all_orthos_and_ortho_cuts))

                    # TODO: histogram matching via rasterio
                    # currently parts have different color tones

                    if io.file_exists(tree.odm_orthophoto_tif):
                        os.remove(tree.odm_orthophoto_tif)

                    orthophoto_vars = orthophoto.get_orthophoto_vars(args)
                    orthophoto.merge(all_orthos_and_ortho_cuts, tree.odm_orthophoto_tif, orthophoto_vars)
                    orthophoto.post_orthophoto_steps(args, merged_bounds_file, tree.odm_orthophoto_tif, tree.orthophoto_tiles)
                elif len(all_orthos_and_ortho_cuts) == 1:
                    # Simply copy
                    log.ODM_WARNING("A single orthophoto/cutline pair was found between all submodels.")
                    shutil.copyfile(all_orthos_and_ortho_cuts[0][0], tree.odm_orthophoto_tif)
                else:
                    log.ODM_WARNING("No orthophoto/cutline pairs were found in any of the submodels. No orthophoto will be generated.")
            else:
                log.ODM_WARNING("Found merged orthophoto in %s" % tree.odm_orthophoto_tif)

        self.update_progress(75)

        # Merge DEMs
        def merge_dems(dem_filename, human_name):
            # Merge one DEM type ("dsm.tif"/"DSM" or "dtm.tif"/"DTM")
            # from all submodels into tree/odm_dem/<dem_filename>.
            # Closes over tree, args, merged_bounds_file and self.rerun().
            if not io.dir_exists(tree.path('odm_dem')):
                system.mkdir_p(tree.path('odm_dem'))

            dem_file = tree.path("odm_dem", dem_filename)
            if not io.file_exists(dem_file) or self.rerun():
                all_dems = get_submodel_paths(tree.submodels_path, "odm_dem", dem_filename)
                log.ODM_INFO("Merging %ss" % human_name)

                # Merge
                dem_vars = utils.get_dem_vars(args)
                eu_map_source = None # Default

                # Use DSM's euclidean map for DTMs
                # (requires the DSM to be computed)
                if human_name == "DTM":
                    eu_map_source = "dsm"

                euclidean_merge_dems(all_dems, dem_file, dem_vars, euclidean_map_source=eu_map_source)

                if io.file_exists(dem_file):
                    # Crop
                    if args.crop > 0:
                        Cropper.crop(merged_bounds_file, dem_file, dem_vars, keep_original=not args.optimize_disk_space)
                    log.ODM_INFO("Created %s" % dem_file)

                    if args.tiles:
                        generate_dem_tiles(dem_file, tree.path("%s_tiles" % human_name.lower()), args.max_concurrency)
                else:
                    log.ODM_WARNING("Cannot merge %s, %s was not created" % (human_name, dem_file))
            else:
                log.ODM_WARNING("Found merged %s in %s" % (human_name, dem_filename))

        if args.merge in ['all', 'dem'] and args.dsm:
            merge_dems("dsm.tif", "DSM")

        if args.merge in ['all', 'dem'] and args.dtm:
            merge_dems("dtm.tif", "DTM")

        self.update_progress(95)

        # Merge reports
        if not io.dir_exists(tree.odm_report):
            system.mkdir_p(tree.odm_report)

        geojson_shots = tree.path(tree.odm_report, "shots.geojson")
        if not io.file_exists(geojson_shots) or self.rerun():
            geojson_shots_files = get_submodel_paths(tree.submodels_path, "odm_report", "shots.geojson")
            log.ODM_INFO("Merging %s shots.geojson files" % len(geojson_shots_files))
            merge_geojson_shots(geojson_shots_files, geojson_shots)
        else:
            log.ODM_WARNING("Found merged shots.geojson in %s" % tree.odm_report)

        # Stop the pipeline short! We're done.
        self.next_stage = None
    else:
        log.ODM_INFO("Normal dataset, nothing to merge.")
        self.progress = 0.0
def process(self, args, outputs):
    """Merge stage for split/large datasets (entwine/GDAL-based variant).

    When ``outputs['large']`` is truthy, merges per-submodel artifacts
    from ``tree.submodels_path``: builds an entwine EPT point cloud and
    exports it to LAZ, merges crop bounds, blends submodel orthophotos
    with ``gdal_merge.py``/``gdalwarp`` cutlines, and merges DEMs, then
    halts the pipeline via ``self.next_stage = None``. For normal
    datasets this stage does nothing.

    :param args: parsed pipeline arguments (reads ``merge``, ``crop``,
        ``dsm``, ``dtm``, ``build_overviews``, ``max_concurrency``).
    :param outputs: pipeline state dict; reads ``'tree'``,
        ``'reconstruction'`` and ``'large'``.
    """
    tree = outputs['tree']
    # NOTE(review): 'reconstruction' is assigned but never used below.
    reconstruction = outputs['reconstruction']

    if outputs['large']:
        # The split stage should have produced the submodels folder;
        # without it there is nothing to merge.
        if not os.path.exists(tree.submodels_path):
            log.ODM_ERROR(
                "We reached the merge stage, but %s folder does not exist. Something must have gone wrong at an earlier stage. Check the log and fix possible problem before restarting?" % tree.submodels_path)
            # NOTE(review): bare exit() — sys.exit(1) would be the
            # conventional call here; confirm project style before changing.
            exit(1)

        # Merge point clouds
        if args.merge in ['all', 'pointcloud']:
            if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
                all_point_clouds = get_submodel_paths(tree.submodels_path, "odm_georeferencing", "odm_georeferenced_model.laz")

                try:
                    # pdal.merge_point_clouds(all_point_clouds, tree.odm_georeferencing_model_laz, args.verbose)
                    # Build an Entwine Point Tile (EPT) dataset from all
                    # submodel point clouds.
                    entwine.build(all_point_clouds, tree.entwine_pointcloud, max_concurrency=args.max_concurrency, rerun=self.rerun())
                except Exception as e:
                    # Best-effort: a failed point-cloud merge does not abort
                    # the rest of the merge stage.
                    log.ODM_WARNING("Could not merge point cloud: %s (skipping)" % str(e))

                if io.dir_exists(tree.entwine_pointcloud):
                    try:
                        # Export the EPT dataset back to a single LAZ file.
                        system.run('pdal translate "ept://{}" "{}"'.format(tree.entwine_pointcloud, tree.odm_georeferencing_model_laz))
                    except Exception as e:
                        log.ODM_WARNING("Cannot export EPT dataset to LAZ: %s" % str(e))
            else:
                log.ODM_WARNING("Found merged point cloud in %s" % tree.odm_georeferencing_model_laz)

        self.update_progress(25)

        # Merge crop bounds
        merged_bounds_file = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
        if not io.file_exists(merged_bounds_file) or self.rerun():
            all_bounds = get_submodel_paths(tree.submodels_path, 'odm_georeferencing', 'odm_georeferenced_model.bounds.gpkg')
            log.ODM_INFO("Merging all crop bounds: %s" % all_bounds)
            if len(all_bounds) > 0:
                # Calculate a new crop area
                # based on the convex hull of all crop areas of all submodels
                # (without a buffer, otherwise we are double-cropping)
                Cropper.merge_bounds(all_bounds, merged_bounds_file, 0)
            else:
                log.ODM_WARNING("No bounds found for any submodel.")

        # Merge orthophotos
        if args.merge in ['all', 'orthophoto']:
            if not io.dir_exists(tree.odm_orthophoto):
                system.mkdir_p(tree.odm_orthophoto)

            if not io.file_exists(tree.odm_orthophoto_tif) or self.rerun():
                # Pairs of (orthophoto tif, cutline gpkg) per submodel.
                all_orthos_and_cutlines = get_all_submodel_paths(tree.submodels_path,
                    os.path.join("odm_orthophoto", "odm_orthophoto.tif"),
                    os.path.join("odm_orthophoto", "cutline.gpkg"),
                )

                if len(all_orthos_and_cutlines) > 1:
                    log.ODM_INFO("Found %s submodels with valid orthophotos and cutlines" % len(all_orthos_and_cutlines))
                    # TODO: histogram matching via rasterio
                    # currently parts have different color tones

                    merged_geotiff = os.path.join(tree.odm_orthophoto, "odm_orthophoto.merged.tif")

                    kwargs = {
                        'orthophoto_merged': merged_geotiff,
                        # Shell-quote each orthophoto path for gdal_merge.py.
                        'input_files': ' '.join(map(lambda i: quote(i[0]), all_orthos_and_cutlines)),
                        'max_memory': get_max_memory(),
                        'threads': args.max_concurrency,
                    }

                    # use bounds as cutlines (blending)
                    if io.file_exists(merged_geotiff):
                        os.remove(merged_geotiff)

                    system.run('gdal_merge.py -o {orthophoto_merged} '
                               #'-createonly '
                               '-co "BIGTIFF=YES" '
                               '-co "BLOCKXSIZE=512" '
                               '-co "BLOCKYSIZE=512" '
                               '--config GDAL_CACHEMAX {max_memory}% '
                               '{input_files} '.format(**kwargs))

                    # Warp each submodel orthophoto into the merged mosaic,
                    # clipped to its cutline with a 20px blend.
                    for ortho_cutline in all_orthos_and_cutlines:
                        kwargs['input_file'], kwargs['cutline'] = ortho_cutline

                        # Note: cblend has a high performance penalty
                        system.run('gdalwarp -cutline {cutline} '
                                   '-cblend 20 '
                                   '-r bilinear -multi '
                                   '-wo NUM_THREADS={threads} '
                                   '--config GDAL_CACHEMAX {max_memory}% '
                                   '{input_file} {orthophoto_merged}'.format(**kwargs))

                    # Apply orthophoto settings (compression, tiling, etc.)
                    orthophoto_vars = orthophoto.get_orthophoto_vars(args)

                    if io.file_exists(tree.odm_orthophoto_tif):
                        os.remove(tree.odm_orthophoto_tif)

                    kwargs = {
                        'vars': ' '.join(['-co %s=%s' % (k, orthophoto_vars[k]) for k in orthophoto_vars]),
                        'max_memory': get_max_memory(),
                        'merged': merged_geotiff,
                        'log': tree.odm_orthophoto_tif_log,
                        'orthophoto': tree.odm_orthophoto_tif,
                    }

                    system.run('gdal_translate '
                               '{vars} '
                               '--config GDAL_CACHEMAX {max_memory}% '
                               '{merged} {orthophoto} > {log}'.format(**kwargs))

                    # Intermediate mosaic is no longer needed.
                    os.remove(merged_geotiff)

                    # Crop
                    if args.crop > 0:
                        Cropper.crop(merged_bounds_file, tree.odm_orthophoto_tif, orthophoto_vars)

                    # Overviews
                    if args.build_overviews:
                        orthophoto.build_overviews(tree.odm_orthophoto_tif)

                elif len(all_orthos_and_cutlines) == 1:
                    # Simply copy
                    log.ODM_WARNING("A single orthophoto/cutline pair was found between all submodels.")
                    shutil.copyfile(all_orthos_and_cutlines[0][0], tree.odm_orthophoto_tif)
                else:
                    log.ODM_WARNING("No orthophoto/cutline pairs were found in any of the submodels. No orthophoto will be generated.")
            else:
                log.ODM_WARNING("Found merged orthophoto in %s" % tree.odm_orthophoto_tif)

        self.update_progress(75)

        # Merge DEMs
        def merge_dems(dem_filename, human_name):
            # Merge one DEM type ("dsm.tif"/"DSM" or "dtm.tif"/"DTM")
            # from all submodels into tree/odm_dem/<dem_filename>.
            # Closes over tree, args, merged_bounds_file and self.rerun().
            if not io.dir_exists(tree.path('odm_dem')):
                system.mkdir_p(tree.path('odm_dem'))

            dem_file = tree.path("odm_dem", dem_filename)
            if not io.file_exists(dem_file) or self.rerun():
                all_dems = get_submodel_paths(tree.submodels_path, "odm_dem", dem_filename)
                log.ODM_INFO("Merging %ss" % human_name)

                # Merge
                dem_vars = utils.get_dem_vars(args)
                euclidean_merge_dems(all_dems, dem_file, dem_vars)

                if io.file_exists(dem_file):
                    # Crop
                    if args.crop > 0:
                        Cropper.crop(merged_bounds_file, dem_file, dem_vars)
                    log.ODM_INFO("Created %s" % dem_file)
                else:
                    log.ODM_WARNING("Cannot merge %s, %s was not created" % (human_name, dem_file))
            else:
                # NOTE(review): logs the bare filename, not the full path
                # (unlike the other "Found merged ..." messages).
                log.ODM_WARNING("Found merged %s in %s" % (human_name, dem_filename))

        if args.merge in ['all', 'dem'] and args.dsm:
            merge_dems("dsm.tif", "DSM")

        if args.merge in ['all', 'dem'] and args.dtm:
            merge_dems("dtm.tif", "DTM")

        # Stop the pipeline short! We're done.
        self.next_stage = None
    else:
        log.ODM_INFO("Normal dataset, nothing to merge.")
        self.progress = 0.0