def export_to_bounds_files(boundary, proj4, bounds_json_file, bounds_gpkg_file):
    """Write a polygon boundary to a GeoJSON file, then convert it to a GeoPackage.

    :param boundary: list of [x, y] coordinate pairs forming the polygon ring
    :param proj4: proj4 string assigned as the CRS of the GeoPackage output
    :param bounds_json_file: path of the GeoJSON file to (over)write
    :param bounds_gpkg_file: path of the GeoPackage file to create
    """
    feature_collection = {
        "type": "FeatureCollection",
        "name": "bounds",
        "features": [{
            "type": "Feature",
            "properties": {},
            "geometry": {
                "type": "Polygon",
                "coordinates": [boundary]
            }
        }]
    }
    with open(bounds_json_file, "w") as f:
        f.write(json.dumps(feature_collection))

    # Start from a clean slate: remove any stale GeoPackage first
    if os.path.isfile(bounds_gpkg_file):
        os.remove(bounds_gpkg_file)

    params = {
        'proj4': proj4,
        'input': double_quote(bounds_json_file),
        'output': double_quote(bounds_gpkg_file)
    }
    system.run('ogr2ogr -overwrite -f GPKG -a_srs "{proj4}" {output} {input}'.format(**params))
def create_bounds_gpkg(self, pointcloud_path, buffer_distance=0, decimation_step=40):
    """Compute a buffered polygon around the data extents (not just a bounding
    box) of the given point cloud.

    :param pointcloud_path: path to the input point cloud
    :param buffer_distance: distance to buffer the polygon outward
    :param decimation_step: decimation applied while estimating the extents
    :return: filename to Geopackage containing the polygon, or '' on failure
    """
    if not os.path.exists(pointcloud_path):
        log.ODM_WARNING(
            'Point cloud does not exist, cannot generate GPKG bounds {}'.
            format(pointcloud_path))
        return ''

    bounds_geojson_path = self.create_bounds_geojson(
        pointcloud_path, buffer_distance, decimation_step)

    # Export a PDAL summary so we can recover the point cloud's SRS
    summary_file_path = os.path.join(
        self.storage_dir, '{}.summary.json'.format(self.files_prefix))
    export_summary_json(pointcloud_path, summary_file_path)

    with open(summary_file_path, 'r') as f:
        summary = json.loads(f.read())
    pc_proj4 = summary['summary']['srs']['proj4']

    if pc_proj4 is None:
        raise RuntimeError(
            "Could not determine point cloud proj4 declaration")

    bounds_gpkg_path = os.path.join(
        self.storage_dir, '{}.bounds.gpkg'.format(self.files_prefix))
    if os.path.isfile(bounds_gpkg_path):
        os.remove(bounds_gpkg_path)

    # Convert bounds to GPKG
    run('ogr2ogr -overwrite -f GPKG -a_srs "{proj4}" {output} {input}'.
        format(proj4=pc_proj4,
               output=double_quote(bounds_gpkg_path),
               input=double_quote(bounds_geojson_path)))

    return bounds_gpkg_path
def run_pipeline(json, verbose=False):
    """ Run PDAL Pipeline with provided JSON

    :param json: the pipeline definition (dict/list) serialized for PDAL.
        The name shadows the stdlib ``json`` module (hence ``jsonlib`` is
        used below); kept as-is for backward compatibility with keyword
        callers.
    :param verbose: when True, print the pipeline, log the temp file path,
        and show PDAL's output instead of discarding it.
    """
    if verbose:
        json_print(json)

    # write to temp file
    f, jsonfile = tempfile.mkstemp(suffix='.json')
    if verbose:
        log.ODM_INFO('Pipeline file: %s' % jsonfile)
    os.write(f, jsonlib.dumps(json).encode('utf8'))
    os.close(f)

    cmd = ['pdal', 'pipeline', '-i %s' % double_quote(jsonfile)]

    try:
        # On Windows there is no /dev/null to redirect to, so always show output
        if verbose or sys.platform == 'win32':
            system.run(' '.join(cmd))
        else:
            system.run(' '.join(cmd) + ' > /dev/null 2>&1')
    finally:
        # Fix: remove the temp pipeline file even when system.run raises,
        # otherwise failed runs leak one temp file each
        os.remove(jsonfile)
# --- Top-level ODM app driver (fragment) ---
# NOTE(review): this span is a chunk of a larger script; it ends mid-call
# (dangling `log.ODM_INFO(`) whose argument lies outside the visible source.
log.ODM_INFO('==============')
progressbc.set_project_name(args.name)

# Add project dir if doesn't exist
args.project_path = os.path.join(args.project_path, args.name)
if not io.dir_exists(args.project_path):
    log.ODM_WARNING('Directory %s does not exist. Creating it now.' % args.name)
    system.mkdir_p(os.path.abspath(args.project_path))

# If user asks to rerun everything, delete all of the existing progress directories.
if args.rerun_all:
    log.ODM_INFO("Rerun all -- Removing old data")
    # Shell out to rm -rf with each path double-quoted; removes both the
    # result directories and the intermediate processing directories.
    os.system("rm -rf " + " ".join([
        double_quote(os.path.join(args.project_path, p))
        for p in get_processing_results_paths()
    ] + [
        double_quote(os.path.join(args.project_path, "odm_meshing")),
        double_quote(os.path.join(args.project_path, "opensfm")),
        double_quote(os.path.join(args.project_path, "odm_texturing_25d")),
        double_quote(os.path.join(args.project_path, "odm_filterpoints")),
        double_quote(os.path.join(args.project_path, "submodels")),
    ]))

# Run the full ODM pipeline
app = ODMApp(args)
retcode = app.execute()

# Do not show ASCII art for local submodels runs
if retcode == 0 and not "submodels/submodel_" in args.project_path:
    log.ODM_INFO(
def process(self, args, outputs):
    """Render the orthophoto from the textured model(s), then georeference it
    as a GeoTIFF (or pseudo-georeference it when no real georeference exists).

    :param args: parsed ODM command-line arguments
    :param outputs: pipeline state dict; reads 'tree' (paths) and
        'reconstruction' (georeferencing / multi-camera info)
    """
    tree = outputs['tree']
    reconstruction = outputs['reconstruction']
    verbose = '-verbose' if args.verbose else ''

    # define paths and create working directories
    system.mkdir_p(tree.odm_orthophoto)

    # Skip entirely if a valid orthophoto already exists and no rerun requested
    if not io.file_exists(tree.odm_orthophoto_tif) or self.rerun():
        gsd_error_estimate = 0.1
        ignore_resolution = False
        if not reconstruction.is_georeferenced():
            # Match DEMs
            gsd_error_estimate = -3
            ignore_resolution = True

        # Resolution for odm_orthophoto is pixels-per-unit (inverse of GSD in cm)
        resolution = 1.0 / (
            gsd.cap_resolution(args.orthophoto_resolution,
                               tree.opensfm_reconstruction,
                               gsd_error_estimate=gsd_error_estimate,
                               ignore_gsd=args.ignore_gsd,
                               ignore_resolution=ignore_resolution,
                               has_gcp=reconstruction.has_gcp()) / 100.0)

        # odm_orthophoto definitions
        kwargs = {
            'odm_ortho_bin': context.odm_orthophoto_path,
            'log': tree.odm_orthophoto_log,
            'ortho': tree.odm_orthophoto_render,
            'corners': tree.odm_orthophoto_corners,
            'res': resolution,
            'bands': '',
            'verbose': verbose
        }

        models = []

        if args.use_3dmesh:
            base_dir = tree.odm_texturing
        else:
            base_dir = tree.odm_25dtexturing

        model_file = tree.odm_textured_model_obj

        if reconstruction.multi_camera:
            # One textured model per band; the primary band lives in base_dir,
            # secondary bands in lowercased band-name subdirectories
            for band in reconstruction.multi_camera:
                primary = band['name'] == get_primary_band_name(
                    reconstruction.multi_camera, args.primary_band)
                subdir = ""
                if not primary:
                    subdir = band['name'].lower()
                models.append(os.path.join(base_dir, subdir, model_file))
            kwargs['bands'] = '-bands %s' % (','.join([
                double_quote(b['name']) for b in reconstruction.multi_camera
            ]))
        else:
            models.append(os.path.join(base_dir, model_file))

        kwargs['models'] = ','.join(map(double_quote, models))

        # run odm_orthophoto
        system.run(
            '"{odm_ortho_bin}" -inputFiles {models} '
            '-logFile "{log}" -outputFile "{ortho}" -resolution {res} {verbose} '
            '-outputCornerFile "{corners}" {bands}'.format(**kwargs))

        # Create georeferenced GeoTiff
        geotiffcreated = False

        if reconstruction.is_georeferenced():
            ulx = uly = lrx = lry = 0.0
            # The corner file's first line holds 4 space-separated local
            # coordinates; add the UTM offsets to get absolute coordinates
            with open(tree.odm_orthophoto_corners) as f:
                for lineNumber, line in enumerate(f):
                    if lineNumber == 0:
                        tokens = line.split(' ')
                        if len(tokens) == 4:
                            ulx = float(tokens[0]) + \
                                float(reconstruction.georef.utm_east_offset)
                            lry = float(tokens[1]) + \
                                float(reconstruction.georef.utm_north_offset)
                            lrx = float(tokens[2]) + \
                                float(reconstruction.georef.utm_east_offset)
                            uly = float(tokens[3]) + \
                                float(reconstruction.georef.utm_north_offset)

            log.ODM_INFO('Creating GeoTIFF')

            orthophoto_vars = orthophoto.get_orthophoto_vars(args)

            kwargs = {
                'ulx': ulx,
                'uly': uly,
                'lrx': lrx,
                'lry': lry,
                'vars': ' '.join([
                    '-co %s=%s' % (k, orthophoto_vars[k])
                    for k in orthophoto_vars
                ]),
                'proj': reconstruction.georef.proj4(),
                'input': tree.odm_orthophoto_render,
                'output': tree.odm_orthophoto_tif,
                'log': tree.odm_orthophoto_tif_log,
                'max_memory': get_max_memory(),
            }

            # Assign corner coordinates + SRS to the rendered raster
            system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} '
                       '{vars} '
                       '-a_srs \"{proj}\" '
                       '--config GDAL_CACHEMAX {max_memory}% '
                       '--config GDAL_TIFF_INTERNAL_MASK YES '
                       '"{input}" "{output}" > "{log}"'.format(**kwargs))

            bounds_file_path = os.path.join(
                tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')

            # Cutline computation, before cropping
            # We want to use the full orthophoto, not the cropped one.
            if args.orthophoto_cutline:
                cutline_file = os.path.join(tree.odm_orthophoto,
                                            "cutline.gpkg")
                compute_cutline(tree.odm_orthophoto_tif,
                                bounds_file_path,
                                cutline_file,
                                args.max_concurrency,
                                scale=0.25)
                orthophoto.compute_mask_raster(
                    tree.odm_orthophoto_tif, cutline_file,
                    os.path.join(tree.odm_orthophoto,
                                 "odm_orthophoto_cut.tif"),
                    blend_distance=20, only_max_coords_feature=True)

            orthophoto.post_orthophoto_steps(args, bounds_file_path,
                                             tree.odm_orthophoto_tif,
                                             tree.orthophoto_tiles)

            # Generate feathered orthophoto also
            if args.orthophoto_cutline:
                orthophoto.feather_raster(
                    tree.odm_orthophoto_tif,
                    os.path.join(tree.odm_orthophoto,
                                 "odm_orthophoto_feathered.tif"),
                    blend_distance=20)

            geotiffcreated = True
        if not geotiffcreated:
            # No real georeference: fall back to pseudo-georeferencing the
            # rendered raster and promoting it to the .tif output path
            if io.file_exists(tree.odm_orthophoto_render):
                pseudogeo.add_pseudo_georeferencing(
                    tree.odm_orthophoto_render)
                log.ODM_INFO(
                    "Renaming %s --> %s" %
                    (tree.odm_orthophoto_render, tree.odm_orthophoto_tif))
                os.replace(tree.odm_orthophoto_render,
                           tree.odm_orthophoto_tif)
            else:
                log.ODM_WARNING(
                    "Could not generate an orthophoto (it did not render)")
    else:
        log.ODM_WARNING('Found a valid orthophoto in: %s' %
                        tree.odm_orthophoto_tif)

    # The intermediate render is no longer needed when saving disk space
    if args.optimize_disk_space and io.file_exists(
            tree.odm_orthophoto_render):
        os.remove(tree.odm_orthophoto_render)
def crop(gpkg_path, geotiff_path, gdal_options, keep_original=True, warp_options=None):
    """Crop a GeoTIFF in place to the cutline stored in a GeoPackage.

    The original raster is first renamed to ``<name>.original.<ext>`` and used
    as gdalwarp's input; on any failure the rename is reverted so the caller's
    file is left as it was.

    :param gpkg_path: path to the GeoPackage containing the cutline
    :param geotiff_path: path of the GeoTIFF to crop (overwritten in place)
    :param gdal_options: mapping of GDAL creation options (emitted as -co KEY=VALUE)
    :param keep_original: when False, delete the renamed original after success
    :param warp_options: optional list of extra gdalwarp command-line arguments
    :return: geotiff_path (cropped on success, unchanged otherwise)
    """
    # Fix: use a None sentinel instead of a mutable default argument ([])
    if warp_options is None:
        warp_options = []

    if not os.path.exists(gpkg_path) or not os.path.exists(geotiff_path):
        log.ODM_WARNING(
            "Either {} or {} does not exist, will skip cropping.".format(
                gpkg_path, geotiff_path))
        return geotiff_path

    log.ODM_INFO("Cropping %s" % geotiff_path)

    # Rename original file
    # path/to/odm_orthophoto.tif --> path/to/odm_orthophoto.original.tif
    path, filename = os.path.split(geotiff_path)
    # path = path/to
    # filename = odm_orthophoto.tif
    basename, ext = os.path.splitext(filename)
    # basename = odm_orthophoto
    # ext = .tif
    original_geotiff = os.path.join(path,
                                    "{}.original{}".format(basename, ext))
    os.replace(geotiff_path, original_geotiff)

    try:
        kwargs = {
            'gpkg_path': double_quote(gpkg_path),
            'geotiffInput': double_quote(original_geotiff),
            'geotiffOutput': double_quote(geotiff_path),
            'options': ' '.join(
                map(lambda k: '-co {}={}'.format(k, gdal_options[k]),
                    gdal_options)),
            'warpOptions': ' '.join(warp_options),
            'max_memory': get_max_memory()
        }

        run('gdalwarp -cutline {gpkg_path} '
            '-crop_to_cutline '
            '{options} '
            '{warpOptions} '
            '{geotiffInput} '
            '{geotiffOutput} '
            '--config GDAL_CACHEMAX {max_memory}%'.format(**kwargs))

        if not keep_original:
            os.remove(original_geotiff)

    except Exception as e:
        # Best-effort: log and restore the original file instead of raising
        log.ODM_WARNING(
            'Something went wrong while cropping: {}'.format(e))

        # Revert rename
        os.replace(original_geotiff, geotiff_path)

    return geotiff_path
# --- Top-level ODM app driver (fragment, second copy) ---
# NOTE(review): the first statement references `k` and `args_dict`, which are
# presumably bound by an enclosing loop outside the visible chunk — confirm
# against the surrounding file.
log.ODM_INFO('%s: %s' % (k, args_dict[k]))
log.ODM_INFO('==============')
progressbc.set_project_name(args.name)

# Add project dir if doesn't exist
args.project_path = os.path.join(args.project_path, args.name)
if not io.dir_exists(args.project_path):
    log.ODM_WARNING('Directory %s does not exist. Creating it now.' % args.name)
    system.mkdir_p(os.path.abspath(args.project_path))

# If user asks to rerun everything, delete all of the existing progress directories.
if args.rerun_all:
    log.ODM_INFO("Rerun all -- Removing old data")
    # Shell out to rm -rf with each path double-quoted; removes both the
    # result directories and the intermediate processing directories.
    os.system("rm -rf " +
              " ".join([double_quote(os.path.join(args.project_path, p))
                        for p in get_processing_results_paths()] + [
                  double_quote(os.path.join(args.project_path, "odm_meshing")),
                  double_quote(os.path.join(args.project_path, "opensfm")),
                  double_quote(os.path.join(args.project_path, "odm_texturing_25d")),
                  double_quote(os.path.join(args.project_path, "odm_filterpoints")),
                  double_quote(os.path.join(args.project_path, "submodels")),
              ]))

# Run the full ODM pipeline
app = ODMApp(args)
retcode = app.execute()

# Do not show ASCII art for local submodels runs
if retcode == 0 and not "submodels/submodel_" in args.project_path:
    log.ODM_INFO('MMMMMMMMMMMNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNMMMMMMMMMMM')
    log.ODM_INFO('MMMMMMdo:..---../sNMMMMMMMMMMMMMMMMMMMMMMMMMMNs/..---..:odMMMMMM')
    log.ODM_INFO('MMMMy-.odNMMMMMNy/`/mMMMMMMMMMMMMMMMMMMMMMMm/`/hNMMMMMNdo.-yMMMM')