Example #1
def generate_colored_hillshade(geotiff):
    relief_file = os.path.join(os.path.dirname(__file__), "color_relief.txt")
    hsv_merge_script = os.path.join(os.path.dirname(__file__), "hsv_merge.py")
    colored_dem = io.related_file_path(geotiff, postfix="color")
    hillshade_dem = io.related_file_path(geotiff, postfix="hillshade")
    colored_hillshade_dem = io.related_file_path(geotiff,
                                                 postfix="colored_hillshade")
    try:
        outputs = [colored_dem, hillshade_dem, colored_hillshade_dem]

        # Cleanup previous
        for f in outputs:
            if os.path.isfile(f):
                os.remove(f)

        system.run('gdaldem color-relief "%s" "%s" "%s" -alpha -co ALPHA=YES' %
                   (geotiff, relief_file, colored_dem))
        system.run(
            'gdaldem hillshade "%s" "%s" -z 1.0 -s 1.0 -az 315.0 -alt 45.0' %
            (geotiff, hillshade_dem))
        system.run('%s "%s" "%s" "%s" "%s"' %
                   (sys.executable, hsv_merge_script, colored_dem,
                    hillshade_dem, colored_hillshade_dem))

        return outputs
    except Exception as e:
        log.ODM_WARNING("Cannot generate colored hillshade: %s" % str(e))
        return (None, None, None)
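All of these snippets lean on io.related_file_path to derive sibling output paths next to an input file. A minimal stand-in, assuming the helper simply wraps the basename with an optional prefix/postfix (a hypothetical sketch, not the actual opendm implementation), could look like:

import os

def related_file_path(input_file_path, prefix="", postfix=""):
    # Hypothetical stand-in: "/data/dsm.tif" with postfix="_cogeo" -> "/data/dsm_cogeo.tif"
    path, filename = os.path.split(input_file_path)
    basename, ext = os.path.splitext(filename)
    return os.path.join(path, "%s%s%s%s" % (prefix, basename, postfix, ext))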
Example #2
def convert_to_cogeo(src_path, blocksize=256, max_workers=1):
    """
    Guarantee that the .tif passed as an argument is a Cloud Optimized GeoTIFF (cogeo)
    The file is destructively converted into a cogeo.
    If the file cannot be converted, the function does not change the file
    :param src_path: path to GeoTIFF
    :return: True on success
    """

    if not os.path.isfile(src_path):
        logger.warning("Cannot convert to cogeo: %s (file does not exist)" %
                       src_path)
        return False

    log.ODM_INFO("Optimizing %s as Cloud Optimized GeoTIFF" % src_path)

    tmpfile = io.related_file_path(src_path, postfix='_cogeo')
    swapfile = io.related_file_path(src_path, postfix='_cogeo_swap')

    kwargs = {
        'threads': max_workers if max_workers else 'ALL_CPUS',
        'blocksize': blocksize,
        'max_memory': get_max_memory(),
        'src_path': src_path,
        'tmpfile': tmpfile,
    }

    try:
        system.run("gdal_translate "
                   "-of COG "
                   "-co NUM_THREADS={threads} "
                   "-co BLOCKSIZE={blocksize} "
                   "-co COMPRESS=deflate "
                   "-co BIGTIFF=IF_SAFER "
                   "-co RESAMPLING=NEAREST "
                   "--config GDAL_CACHEMAX {max_memory}% "
                   "--config GDAL_NUM_THREADS {threads} "
                   "\"{src_path}\" \"{tmpfile}\" ".format(**kwargs))
    except Exception as e:
        log.ODM_WARNING("Cannot create Cloud Optimized GeoTIFF: %s" % str(e))

    if os.path.isfile(tmpfile):
        shutil.move(src_path, swapfile)  # Move to swap location

        try:
            shutil.move(tmpfile, src_path)
        except IOError as e:
            log.ODM_WARNING("Cannot move %s to %s: %s" %
                            (tmpfile, src_path, str(e)))
            shutil.move(swapfile, src_path)  # Attempt to restore

        if os.path.isfile(swapfile):
            os.remove(swapfile)

        return True
    else:
        return False
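A hedged usage sketch (the path below is illustrative, not from the source): the conversion happens in place and the return value signals whether the COG was produced.

orthophoto = "/data/project/odm_orthophoto/odm_orthophoto.tif"  # hypothetical path
if convert_to_cogeo(orthophoto, blocksize=256, max_workers=4):
    log.ODM_INFO("COG written in place: %s" % orthophoto)
else:
    log.ODM_WARNING("Conversion skipped or failed; original file left unchanged")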
Example #3
def build(input_point_cloud_files,
          output_path,
          max_concurrency=8,
          rerun=False):
    if len(input_point_cloud_files) == 0:
        log.ODM_WARNING("No input point cloud files to process")
        return

    tmpdir = io.related_file_path(output_path, postfix="-tmp")

    if rerun and io.dir_exists(output_path):
        log.ODM_WARNING("Removing previous EPT directory: %s" % output_path)
        shutil.rmtree(output_path)

    kwargs = {
        'threads': max_concurrency,
        'tmpdir': tmpdir,
        'input': "-i " + " ".join(map(quote, input_point_cloud_files)),
        'outputdir': output_path
    }

    system.run(
        "entwine build --threads {threads} --tmp {tmpdir} {input} -o {outputdir}"
        .format(**kwargs))

    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
Example #4
def build(input_point_cloud_files,
          output_path,
          max_concurrency=8,
          rerun=False):
    num_files = len(input_point_cloud_files)
    if num_files == 0:
        log.ODM_WARNING("No input point cloud files to process")
        return

    tmpdir = io.related_file_path(output_path, postfix="-tmp")

    if rerun and io.dir_exists(output_path):
        log.ODM_WARNING("Removing previous EPT directory: %s" % output_path)
        shutil.rmtree(output_path)

    kwargs = {
        # 'threads': max_concurrency,
        'tmpdir': tmpdir,
        'files': "--files " + " ".join(map(quote, input_point_cloud_files)),
        'outputdir': output_path
    }

    # Run untwine
    system.run(
        'untwine --temp_dir "{tmpdir}" {files} --output_dir "{outputdir}"'.
        format(**kwargs))

    # Cleanup
    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
Example #5
def build_copc(input_point_cloud_files, output_file):
    if len(input_point_cloud_files) == 0:
        log.ODM_WARNING("Cannot build COPC, no input files")
        return

    base_path, ext = os.path.splitext(output_file)
    tmpdir = io.related_file_path(base_path, postfix="-tmp")
    if os.path.exists(tmpdir):
        log.ODM_WARNING("Removing previous directory %s" % tmpdir)
        shutil.rmtree(tmpdir)

    kwargs = {
        'tmpdir': tmpdir,
        'files':
        "--files " + " ".join(map(double_quote, input_point_cloud_files)),
        'output': output_file
    }

    # Run untwine
    system.run(
        'untwine --temp_dir "{tmpdir}" {files} -o "{output}" --single_file'.
        format(**kwargs))

    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
Example #6
def compute_cutline(orthophoto_file, crop_area_file, destination, max_concurrency=1, tmpdir=None, scale=1):
    if io.file_exists(orthophoto_file) and io.file_exists(crop_area_file):
        from opendm.grass_engine import grass
        log.ODM_DEBUG("Computing cutline")

        if tmpdir and not io.dir_exists(tmpdir):
            system.mkdir_p(tmpdir)

        scale = max(0.0001, min(1, scale))
        scaled_orthophoto = None

        if scale < 1:
            log.ODM_DEBUG("Scaling orthophoto to %s%% to compute cutline" % (scale * 100))

            scaled_orthophoto = os.path.join(tmpdir, os.path.basename(io.related_file_path(orthophoto_file, postfix=".scaled")))
            # Scale orthophoto before computing cutline
            system.run("gdal_translate -outsize {}% 0 "
                "-co NUM_THREADS={} "
                "--config GDAL_CACHEMAX {}% "
                "{} {}".format(
                scale * 100,
                max_concurrency,
                concurrency.get_max_memory(),
                orthophoto_file,
                scaled_orthophoto
            ))
            orthophoto_file = scaled_orthophoto

        try:
            ortho_width, ortho_height = get_image_size.get_image_size(orthophoto_file, fallback_on_error=False)
            log.ODM_DEBUG("Orthophoto dimensions are %sx%s" % (ortho_width, ortho_height))
            number_lines = int(max(8, math.ceil(min(ortho_width, ortho_height) / 256.0)))
        except:
            log.ODM_DEBUG("Cannot compute orthophoto dimensions, setting arbitrary number of lines.")
            number_lines = 32
        
        log.ODM_DEBUG("Number of lines: %s" % number_lines)

        gctx = grass.create_context({'auto_cleanup' : False, 'tmpdir': tmpdir})
        gctx.add_param('orthophoto_file', orthophoto_file)
        gctx.add_param('crop_area_file', crop_area_file)
        gctx.add_param('number_lines', number_lines)
        gctx.add_param('max_concurrency', max_concurrency)
        gctx.add_param('memory', int(concurrency.get_max_memory_mb(300)))
        gctx.set_location(orthophoto_file)

        cutline_file = gctx.execute(os.path.join("opendm", "grass", "compute_cutline.grass"))
        if cutline_file != 'error':
            if io.file_exists(cutline_file):
                shutil.move(cutline_file, destination)
                log.ODM_INFO("Generated cutline file: %s --> %s" % (cutline_file, destination))
                gctx.cleanup()
                return destination
            else:
                log.ODM_WARNING("Unexpected script result: %s. No cutline file has been generated." % cutline_file)
        else:
            log.ODM_WARNING("Could not generate orthophoto cutline. An error occured when running GRASS. No orthophoto will be generated.")
    else:
        log.ODM_WARNING("We've been asked to compute cutline, but either %s or %s is missing. Skipping..." % (orthophoto_file, crop_area_file))
Example #7
def run_filter(pcs):
    # Recurse
    filter(pcs['path'], io.related_file_path(pcs['path'], postfix="_filtered"),
           standard_deviation=standard_deviation,
           meank=meank,
           sample_radius=sample_radius,
           verbose=verbose,
           max_concurrency=1)
Example #8
def generate_dem_tiles(geotiff, output_dir, max_concurrency):
    relief_file = os.path.join(os.path.dirname(__file__), "color_relief.txt")
    hsv_merge_script = os.path.join(os.path.dirname(__file__), "hsv_merge.py")
    colored_dem = io.related_file_path(geotiff, postfix="color")
    hillshade_dem = io.related_file_path(geotiff, postfix="hillshade")
    colored_hillshade_dem = io.related_file_path(geotiff, postfix="colored_hillshade")

    try:
        system.run('gdaldem color-relief "%s" "%s" "%s" -alpha -co ALPHA=YES' % (geotiff, relief_file, colored_dem))
        system.run('gdaldem hillshade "%s" "%s" -z 1.0 -s 1.0 -az 315.0 -alt 45.0' % (geotiff, hillshade_dem))
        system.run('python3 "%s" "%s" "%s" "%s"' % (hsv_merge_script, colored_dem, hillshade_dem, colored_hillshade_dem))
        generate_tiles(colored_hillshade_dem, output_dir, max_concurrency)

        # Cleanup
        for f in [colored_dem, hillshade_dem, colored_hillshade_dem]:
            if os.path.isfile(f):
                os.remove(f)
    except Exception as e:
        log.ODM_WARNING("Cannot generate DEM tiles: %s" % str(e))
Example #9
def post_point_cloud_steps(args, tree, rerun=False):
    # XYZ point cloud output
    if args.pc_csv:
        log.ODM_INFO("Creating CSV file (XYZ format)")

        if not io.file_exists(tree.odm_georeferencing_xyz_file) or rerun:
            system.run("pdal translate -i \"{}\" "
                       "-o \"{}\" "
                       "--writers.text.format=csv "
                       "--writers.text.order=\"X,Y,Z\" "
                       "--writers.text.keep_unspecified=false ".format(
                           tree.odm_georeferencing_model_laz,
                           tree.odm_georeferencing_xyz_file))
        else:
            log.ODM_WARNING("Found existing CSV file %s" %
                            tree.odm_georeferencing_xyz_file)

    # LAS point cloud output
    if args.pc_las:
        log.ODM_INFO("Creating LAS file")

        if not io.file_exists(tree.odm_georeferencing_model_las) or rerun:
            system.run("pdal translate -i \"{}\" "
                       "-o \"{}\" ".format(tree.odm_georeferencing_model_laz,
                                           tree.odm_georeferencing_model_las))
        else:
            log.ODM_WARNING("Found existing LAS file %s" %
                            tree.odm_georeferencing_xyz_file)

    # EPT point cloud output
    if args.pc_ept:
        log.ODM_INFO("Creating Entwine Point Tile output")
        entwine.build([tree.odm_georeferencing_model_laz],
                      tree.entwine_pointcloud,
                      max_concurrency=args.max_concurrency,
                      rerun=rerun)

    # COPC point clouds
    if args.pc_copc:
        log.ODM_INFO("Creating Cloud Optimized Point Cloud (COPC)")

        copc_output = io.related_file_path(tree.odm_georeferencing_model_laz,
                                           postfix=".copc")
        entwine.build_copc([tree.odm_georeferencing_model_laz], copc_output)
Example #10
def build(input_point_cloud_files,
          output_path,
          max_concurrency=8,
          rerun=False):
    num_files = len(input_point_cloud_files)
    if num_files == 0:
        log.ODM_WARNING("No input point cloud files to process")
        return

    tmpdir = io.related_file_path(output_path, postfix="-tmp")

    def dir_cleanup():
        if io.dir_exists(output_path):
            log.ODM_WARNING("Removing previous EPT directory: %s" %
                            output_path)
            shutil.rmtree(output_path)

        if io.dir_exists(tmpdir):
            log.ODM_WARNING("Removing previous EPT temp directory: %s" %
                            tmpdir)
            shutil.rmtree(tmpdir)

    if rerun:
        dir_cleanup()

    # Attempt with entwine (faster, more memory hungry)
    try:
        build_entwine(input_point_cloud_files,
                      tmpdir,
                      output_path,
                      max_concurrency=max_concurrency)
    except Exception as e:
        log.ODM_WARNING(
            "Cannot build EPT using entwine (%s), attempting with untwine..." %
            str(e))
        dir_cleanup()
        build_untwine(input_point_cloud_files,
                      tmpdir,
                      output_path,
                      max_concurrency=max_concurrency)

    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
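A hedged usage sketch of the fallback wrapper above (paths are hypothetical): entwine is tried first, and untwine takes over only if entwine raises.

laz_files = ["/data/project/odm_georeferencing/odm_georeferenced_model.laz"]
build(laz_files, "/data/project/entwine_pointcloud",
      max_concurrency=8, rerun=False)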
Example #11
def mve_cleanmesh(mve_confidence, mve_model, max_concurrency):
    if mve_confidence > 0:
        mve_filtered_model = io.related_file_path(mve_model,
                                                  postfix=".filtered")
        system.run('%s -t%s --no-clean --component-size=0 "%s" "%s"' %
                   (context.meshclean_path, min(
                       1.0, mve_confidence), mve_model, mve_filtered_model),
                   env_vars={'OMP_NUM_THREADS': max_concurrency})

        # if io.file_exists(mve_filtered_model):
        #     os.remove(tree.mve_model)
        #     os.rename(mve_filtered_model, tree.mve_model)
        # else:
        #     log.ODM_WARNING("Couldn't filter MVE model (%s does not exist)." % mve_filtered_model)

    # if args.optimize_disk_space:
    #     shutil.rmtree(tree.mve_views)
    else:
        log.ODM_WARNING('Found a valid MVE reconstruction file in: %s' %
                        mve_model)
Example #12
def build(input_point_cloud_files,
          output_path,
          max_concurrency=8,
          rerun=False):
    num_files = len(input_point_cloud_files)
    if num_files == 0:
        log.ODM_WARNING("No input point cloud files to process")
        return

    tmpdir = io.related_file_path(output_path, postfix="-tmp")

    if rerun and io.dir_exists(output_path):
        log.ODM_WARNING("Removing previous EPT directory: %s" % output_path)
        shutil.rmtree(output_path)

    kwargs = {
        'threads': max_concurrency,
        'tmpdir': tmpdir,
        'all_inputs': "-i " + " ".join(map(quote, input_point_cloud_files)),
        'outputdir': output_path
    }

    # Run scan to compute dataset bounds
    system.run(
        'entwine scan --threads {threads} --tmp "{tmpdir}" {all_inputs} -o "{outputdir}"'
        .format(**kwargs))
    scan_json = os.path.join(output_path, "scan.json")

    if os.path.exists(scan_json):
        kwargs['input'] = scan_json
        for _ in range(num_files):
            # One at a time
            system.run(
                'entwine build --threads {threads} --tmp "{tmpdir}" -i "{input}" -o "{outputdir}" --run 1'
                .format(**kwargs))
    else:
        log.ODM_WARNING("%s does not exist, no point cloud will be built." %
                        scan_json)

    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
Example #13
    def process(self, args, outputs):
        cm = outputs["cm"]
        
        # TODO: support for delaunay (needs CGAL), 2.5D mesh
        mesh_file = os.path.join(args.project_path, 'odm_meshing', 'odm_mesh.ply')
        mesh_file_dirty = io.related_file_path(mesh_file, postfix=".dirty")

        mesher_type = "{}_mesher".format(args.mesher.lower())

        if not os.path.exists(mesh_file) or self.rerun():
            kwargs = {}
            if mesher_type == "poisson_mesher":
                kwargs['PoissonMeshing.depth'] = args.mesh_octree_depth
                kwargs['PoissonMeshing.trim'] = 0
                # kwargs['PoissonMeshing.color'] = 0

            cm.run(mesher_type, input_path=outputs["point_cloud_ply_file"],
                                output_path=mesh_file_dirty,
                                **kwargs)

            kwargs = {
                'outfile': mesh_file,
                'infile': mesh_file_dirty,
                'max_vertex': args.mesh_size,
                'verbose': '-verbose' if args.verbose else ''
            }

            system.run('odm_cleanmesh -inputFile {infile} '
                '-outputFile {outfile} '
                '-removeIslands '
                '-decimateMesh {max_vertex} {verbose} '.format(**kwargs))

            if os.path.exists(mesh_file_dirty):
                os.remove(mesh_file_dirty)
        else:
            log.ODM_WARNING("Found existing mesh: %s" % mesh_file)

        outputs["mesh_file"] = mesh_file
Example #14
    def process(self, args, outputs):
        tree = outputs['tree']
        las_model_found = io.file_exists(tree.odm_georeferencing_model_laz)

        log.ODM_INFO('Classify: ' + str(args.pc_classify))
        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(tree.odm_georeferencing_model_laz, str(las_model_found)))

        # define paths and create working directories
        odm_dem_root = tree.path('odm_dem')
        if not io.dir_exists(odm_dem_root):
            system.mkdir_p(odm_dem_root)

        if args.pc_classify and las_model_found:
            pc_classify_marker = os.path.join(odm_dem_root, 'pc_classify_done.txt')

            if not io.file_exists(pc_classify_marker) or self.rerun():
                log.ODM_INFO("Classifying {} using Simple Morphological Filter".format(tree.odm_georeferencing_model_laz))
                commands.classify(tree.odm_georeferencing_model_laz,
                                  args.smrf_scalar, 
                                  args.smrf_slope, 
                                  args.smrf_threshold, 
                                  args.smrf_window,
                                  verbose=args.verbose
                                )

                with open(pc_classify_marker, 'w') as f:
                    f.write('Classify: smrf\n')
                    f.write('Scalar: {}\n'.format(args.smrf_scalar))
                    f.write('Slope: {}\n'.format(args.smrf_slope))
                    f.write('Threshold: {}\n'.format(args.smrf_threshold))
                    f.write('Window: {}\n'.format(args.smrf_window))
            
        progress = 20
        self.update_progress(progress)

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and las_model_found:
            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                self.rerun():

                products = []
                if args.dsm: products.append('dsm')
                if args.dtm: products.append('dtm')
                
                resolution = gsd.cap_resolution(args.dem_resolution, tree.opensfm_reconstruction, gsd_error_estimate=-3, ignore_gsd=args.ignore_gsd)
                radius_steps = [(resolution / 100.0) / 2.0]
                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary, maybe there's a better value?

                for product in products:
                    commands.create_dem(
                            tree.odm_georeferencing_model_laz,
                            product,
                            output_type='idw' if product == 'dtm' else 'max',
                            radiuses=map(str, radius_steps),
                            gapfill=args.dem_gapfill_steps > 0,
                            outdir=odm_dem_root,
                            resolution=resolution / 100.0,
                            decimation=args.dem_decimation,
                            verbose=args.verbose,
                            max_workers=args.max_concurrency,
                            keep_unfilled_copy=args.dem_euclidean_map
                        )

                    dem_geotiff_path = os.path.join(odm_dem_root, "{}.tif".format(product))
                    bounds_file_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')

                    if args.crop > 0:
                        # Crop DEM
                        Cropper.crop(bounds_file_path, dem_geotiff_path, utils.get_dem_vars(args))

                    if args.dem_euclidean_map:
                        unfilled_dem_path = io.related_file_path(dem_geotiff_path, postfix=".unfilled")
                        
                        if args.crop > 0:
                            # Crop unfilled DEM
                            Cropper.crop(bounds_file_path, unfilled_dem_path, utils.get_dem_vars(args))

                        commands.compute_euclidean_map(unfilled_dem_path, 
                                            io.related_file_path(dem_geotiff_path, postfix=".euclideand"), 
                                            overwrite=True)
                    
                    progress += 30
                    self.update_progress(progress)
            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')
Example #15
def compute_cutline(orthophoto_file,
                    crop_area_file,
                    destination,
                    max_concurrency=1,
                    scale=1):
    if io.file_exists(orthophoto_file) and io.file_exists(crop_area_file):
        log.ODM_INFO("Computing cutline")

        scale = max(0.0001, min(1, scale))
        scaled_orthophoto = None
        if scale < 1:
            log.ODM_INFO("Scaling orthophoto to %s%% to compute cutline" %
                         (scale * 100))

            scaled_orthophoto = io.related_file_path(orthophoto_file,
                                                     postfix=".scaled")
            # Scale orthophoto before computing cutline
            system.run("gdal_translate -outsize {}% 0 "
                       "-co NUM_THREADS={} "
                       "--config GDAL_CACHEMAX {}% "
                       '"{}" "{}"'.format(scale * 100, max_concurrency,
                                          concurrency.get_max_memory(),
                                          orthophoto_file, scaled_orthophoto))

            orthophoto_file = scaled_orthophoto

        # open raster
        f = rasterio.open(orthophoto_file)
        rast = f.read(1)  # First band only
        height, width = rast.shape
        number_lines = int(max(8, math.ceil(min(width, height) / 256.0)))
        line_hor_offset = int(width / number_lines)
        line_ver_offset = int(height / number_lines)

        if line_hor_offset <= 2 or line_ver_offset <= 2:
            log.ODM_WARNING(
                "Cannot compute cutline, orthophoto is too small (%sx%spx)" %
                (width, height))
            return

        crop_f = fiona.open(crop_area_file, 'r')
        if len(crop_f) == 0:
            log.ODM_WARNING("Crop area is empty, cannot compute cutline")
            return

        crop_poly = shape(crop_f[1]['geometry'])
        crop_f.close()

        linestrings = []

        # Compute canny edges on first band
        edges = canny(rast)

        def compute_linestrings(direction):
            log.ODM_INFO("Computing %s cutlines" % direction)
            # Initialize cost map
            cost_map = np.full((height, width), 1, dtype=np.float32)

            # Write edges to cost map
            cost_map[edges == True] = 0  # Low cost

            # Write "barrier, floor is lava" costs
            if direction == 'vertical':
                lines = [((i, 0), (i, height - 1))
                         for i in range(line_hor_offset, width -
                                        line_hor_offset, line_hor_offset)]
                points = []
                pad_x = int(line_hor_offset / 2.0)
                for i in range(0, len(lines)):
                    a, b = lines[i]
                    points.append(((a[0] - pad_x, a[1]), (b[0] - pad_x, b[1])))
                a, b = lines[-1]
                points.append(((a[0] + pad_x, a[1]), (b[0] + pad_x, b[1])))
            else:
                lines = [((0, j), (width - 1, j))
                         for j in range(line_ver_offset, height -
                                        line_ver_offset, line_ver_offset)]
                points = []
                pad_y = int(line_ver_offset / 2.0)
                for i in range(0, len(lines)):
                    a, b = lines[i]
                    points.append(((a[0], a[1] - pad_y), (b[0], b[1] - pad_y)))
                a, b = lines[-1]
                points.append(((a[0], a[1] + pad_y), (b[0], b[1] + pad_y)))

            for a, b in lines:
                rr, cc = line(*a, *b)
                cost_map[cc, rr] = 9999  # Lava

            # Calculate route
            for a, b in points:
                line_coords, cost = route_through_array(cost_map, (a[1], a[0]),
                                                        (b[1], b[0]),
                                                        fully_connected=True,
                                                        geometric=True)

                # Convert to geographic
                geo_line_coords = [f.xy(*c) for c in line_coords]

                # Simplify
                ls = LineString(geo_line_coords)
                linestrings.append(ls.simplify(0.05, preserve_topology=False))

        compute_linestrings('vertical')
        compute_linestrings('horizontal')

        # Generate polygons and keep only those inside the crop area
        log.ODM_INFO("Generating polygons... this could take a bit.")
        polygons = []
        for p in polygonize(unary_union(linestrings)):
            if crop_poly.contains(p):
                polygons.append(p)

        # This should never happen
        if len(polygons) == 0:
            log.ODM_WARNING("No polygons, cannot compute cutline")
            return

        log.ODM_INFO("Merging polygons")
        cutline_polygons = unary_union(polygons)
        if not hasattr(cutline_polygons, '__getitem__'):
            cutline_polygons = [cutline_polygons]

        largest_cutline = cutline_polygons[0]
        max_area = largest_cutline.area
        for p in cutline_polygons:
            if p.area > max_area:
                max_area = p.area
                largest_cutline = p

        log.ODM_INFO("Largest cutline found: %s m^2" % max_area)

        meta = {
            'crs': {
                'init': str(f.crs).lower()
            },
            'driver': 'GPKG',
            'schema': {
                'properties': {},
                'geometry': 'Polygon'
            }
        }

        # Remove previous
        if os.path.exists(destination):
            os.remove(destination)

        with fiona.open(destination, 'w', **meta) as sink:
            sink.write({
                'geometry': mapping(largest_cutline),
                'properties': {}
            })
        f.close()
        log.ODM_INFO("Wrote %s" % destination)

        # Cleanup
        if scaled_orthophoto is not None and os.path.exists(scaled_orthophoto):
            os.remove(scaled_orthophoto)
    else:
        log.ODM_WARNING(
            "We've been asked to compute cutline, but either %s or %s is missing. Skipping..."
            % (orthophoto_file, crop_area_file))
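A minimal sketch of the least-cost routing idea used above, on a synthetic cost map rather than an orthophoto; it only assumes scikit-image's route_through_array.

import numpy as np
from skimage.graph import route_through_array

cost_map = np.full((100, 100), 1, dtype=np.float32)
cost_map[:, 50] = 0  # a cheap vertical "edge" the path should follow
path, cost = route_through_array(cost_map, (0, 40), (99, 60),
                                 fully_connected=True, geometric=True)
print(len(path), cost)  # list of (row, col) pixels and the accumulated cost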
Example #16
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        if not os.path.exists(tree.odm_report): system.mkdir_p(tree.odm_report)

        log.ODM_INFO("Exporting shots.geojson")

        shots_geojson = os.path.join(tree.odm_report, "shots.geojson")
        if not io.file_exists(shots_geojson) or self.rerun():
            # Extract geographical camera shots
            if reconstruction.is_georeferenced():
                shots = get_geojson_shots_from_opensfm(
                    tree.opensfm_reconstruction,
                    utm_srs=reconstruction.get_proj_srs(),
                    utm_offset=reconstruction.georef.utm_offset())
            else:
                # Pseudo geo
                shots = get_geojson_shots_from_opensfm(
                    tree.opensfm_reconstruction,
                    pseudo_geotiff=tree.odm_orthophoto_tif)

            if shots:
                with open(shots_geojson, "w") as fout:
                    fout.write(json.dumps(shots))

                log.ODM_INFO("Wrote %s" % shots_geojson)
            else:
                log.ODM_WARNING("Cannot extract shots")
        else:
            log.ODM_WARNING('Found a valid shots file in: %s' % shots_geojson)

        if args.skip_report:
            # Stop right here
            log.ODM_WARNING("Skipping report generation as requested")
            return

        # Augment OpenSfM stats file with our own stats
        odm_stats_json = os.path.join(tree.odm_report, "stats.json")
        octx = OSFMContext(tree.opensfm)
        osfm_stats_json = octx.path("stats", "stats.json")
        odm_stats = None
        point_cloud_file = None
        views_dimension = None

        if not os.path.exists(odm_stats_json) or self.rerun():
            if os.path.exists(osfm_stats_json):
                with open(osfm_stats_json, 'r') as f:
                    odm_stats = json.loads(f.read())

                # Add point cloud stats
                if os.path.exists(tree.odm_georeferencing_model_laz):
                    point_cloud_file = tree.odm_georeferencing_model_laz
                    views_dimension = "UserData"

                    # pc_info_file should have been generated by cropper
                    pc_info_file = os.path.join(
                        tree.odm_georeferencing,
                        "odm_georeferenced_model.info.json")
                    odm_stats[
                        'point_cloud_statistics'] = generate_point_cloud_stats(
                            tree.odm_georeferencing_model_laz, pc_info_file,
                            self.rerun())
                else:
                    ply_pc = os.path.join(tree.odm_filterpoints,
                                          "point_cloud.ply")
                    if os.path.exists(ply_pc):
                        point_cloud_file = ply_pc
                        views_dimension = "views"

                        pc_info_file = os.path.join(tree.odm_filterpoints,
                                                    "point_cloud.info.json")
                        odm_stats[
                            'point_cloud_statistics'] = generate_point_cloud_stats(
                                ply_pc, pc_info_file, self.rerun())
                    else:
                        log.ODM_WARNING("No point cloud found")

                odm_stats['point_cloud_statistics'][
                    'dense'] = not args.fast_orthophoto

                # Add runtime stats
                total_time = (system.now_raw() -
                              outputs['start_time']).total_seconds()
                odm_stats['odm_processing_statistics'] = {
                    'total_time':
                    total_time,
                    'total_time_human':
                    hms(total_time),
                    'average_gsd':
                    gsd.opensfm_reconstruction_average_gsd(
                        octx.recon_file(),
                        use_all_shots=reconstruction.has_gcp()),
                }

                with open(odm_stats_json, 'w') as f:
                    f.write(json.dumps(odm_stats))
            else:
                log.ODM_WARNING(
                    "Cannot generate report, OpenSfM stats are missing")
        else:
            log.ODM_WARNING("Reading existing stats %s" % odm_stats_json)
            with open(odm_stats_json, 'r') as f:
                odm_stats = json.loads(f.read())

        # Generate overlap diagram
        if odm_stats.get('point_cloud_statistics'
                         ) and point_cloud_file and views_dimension:
            bounds = odm_stats['point_cloud_statistics'].get('stats', {}).get(
                'bbox', {}).get('native', {}).get('bbox')
            if bounds:
                image_target_size = 1400  # pixels
                osfm_stats_dir = os.path.join(tree.opensfm, "stats")
                diagram_tiff = os.path.join(osfm_stats_dir, "overlap.tif")
                diagram_png = os.path.join(osfm_stats_dir, "overlap.png")

                width = bounds.get('maxx') - bounds.get('minx')
                height = bounds.get('maxy') - bounds.get('miny')
                max_dim = max(width, height)
                resolution = float(max_dim) / float(image_target_size)
                radius = resolution * math.sqrt(2)

                # Larger radius for sparse point cloud diagram
                if not odm_stats['point_cloud_statistics']['dense']:
                    radius *= 10

                system.run("pdal translate -i \"{}\" "
                           "-o \"{}\" "
                           "--writer gdal "
                           "--writers.gdal.resolution={} "
                           "--writers.gdal.data_type=uint8_t "
                           "--writers.gdal.dimension={} "
                           "--writers.gdal.output_type=max "
                           "--writers.gdal.radius={} ".format(
                               point_cloud_file, diagram_tiff, resolution,
                               views_dimension, radius))
                report_assets = os.path.abspath(
                    os.path.join(os.path.dirname(__file__),
                                 "../opendm/report"))
                overlap_color_map = os.path.join(report_assets,
                                                 "overlap_color_map.txt")

                bounds_file_path = os.path.join(
                    tree.odm_georeferencing,
                    'odm_georeferenced_model.bounds.gpkg')
                if (args.crop > 0
                        or args.boundary) and os.path.isfile(bounds_file_path):
                    Cropper.crop(bounds_file_path,
                                 diagram_tiff,
                                 get_orthophoto_vars(args),
                                 keep_original=False)

                system.run(
                    "gdaldem color-relief \"{}\" \"{}\" \"{}\" -of PNG -alpha".
                    format(diagram_tiff, overlap_color_map, diagram_png))

                # Copy assets
                for asset in [
                        "overlap_diagram_legend.png", "dsm_gradient.png"
                ]:
                    shutil.copy(os.path.join(report_assets, asset),
                                os.path.join(osfm_stats_dir, asset))

                # Generate previews of ortho/dsm
                if os.path.isfile(tree.odm_orthophoto_tif):
                    osfm_ortho = os.path.join(osfm_stats_dir, "ortho.png")
                    generate_png(tree.odm_orthophoto_tif, osfm_ortho,
                                 image_target_size)

                dems = []
                if args.dsm:
                    dems.append("dsm")
                if args.dtm:
                    dems.append("dtm")

                for dem in dems:
                    dem_file = tree.path("odm_dem", "%s.tif" % dem)
                    if os.path.isfile(dem_file):
                        # Resize first (faster)
                        resized_dem_file = io.related_file_path(
                            dem_file, postfix=".preview")
                        system.run(
                            "gdal_translate -outsize {} 0 \"{}\" \"{}\" --config GDAL_CACHEMAX {}%"
                            .format(image_target_size, dem_file,
                                    resized_dem_file, get_max_memory()))

                        log.ODM_INFO("Computing raster stats for %s" %
                                     resized_dem_file)
                        dem_stats = get_raster_stats(resized_dem_file)
                        if len(dem_stats) > 0:
                            odm_stats[dem + '_statistics'] = dem_stats[0]

                        osfm_dem = os.path.join(osfm_stats_dir, "%s.png" % dem)
                        colored_dem, hillshade_dem, colored_hillshade_dem = generate_colored_hillshade(
                            resized_dem_file)
                        system.run(
                            "gdal_translate -outsize {} 0 -of png \"{}\" \"{}\" --config GDAL_CACHEMAX {}%"
                            .format(image_target_size, colored_hillshade_dem,
                                    osfm_dem, get_max_memory()))
                        for f in [
                                resized_dem_file, colored_dem, hillshade_dem,
                                colored_hillshade_dem
                        ]:
                            if os.path.isfile(f):
                                os.remove(f)
            else:
                log.ODM_WARNING(
                    "Cannot generate overlap diagram, cannot compute point cloud bounds"
                )
        else:
            log.ODM_WARNING(
                "Cannot generate overlap diagram, point cloud stats missing")

        octx.export_report(os.path.join(tree.odm_report, "report.pdf"),
                           odm_stats, self.rerun())
Example #17
def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True,
                outdir='', resolution=0.1, max_workers=1, max_tile_size=4096,
                verbose=False, decimation=None, keep_unfilled_copy=False,
                apply_smoothing=True):
    """ Create DEM from multiple radii, and optionally gapfill """
    
    global error
    error = None

    start = datetime.now()

    if not os.path.exists(outdir):
        log.ODM_INFO("Creating %s" % outdir)
        os.mkdir(outdir)

    extent = point_cloud.get_extent(input_point_cloud)
    log.ODM_INFO("Point cloud bounds are [minx: %s, maxx: %s] [miny: %s, maxy: %s]" % (extent['minx'], extent['maxx'], extent['miny'], extent['maxy']))
    ext_width = extent['maxx'] - extent['minx']
    ext_height = extent['maxy'] - extent['miny']

    w, h = (int(math.ceil(ext_width / float(resolution))),
            int(math.ceil(ext_height / float(resolution))))

    # Set a floor, no matter the resolution parameter
    # (sometimes a wrongly estimated scale of the model can cause the resolution
    # to be set unrealistically low, causing errors)
    RES_FLOOR = 64
    if w < RES_FLOOR and h < RES_FLOOR:
        prev_w, prev_h = w, h
        
        if w >= h:
            w, h = (RES_FLOOR, int(math.ceil(ext_height / ext_width * RES_FLOOR)))
        else:
            w, h = (int(math.ceil(ext_width / ext_height * RES_FLOOR)), RES_FLOOR)
        
        floor_ratio = prev_w / float(w)
        resolution *= floor_ratio
        radiuses = [str(float(r) * floor_ratio) for r in radiuses]

        log.ODM_WARNING("Really low resolution DEM requested %s will set floor at %s pixels. Resolution changed to %s. The scale of this reconstruction might be off." % ((prev_w, prev_h), RES_FLOOR, resolution))
        
    final_dem_pixels = w * h

    num_splits = int(max(1, math.ceil(math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size)))/math.log(2))))
    num_tiles = num_splits * num_splits
    log.ODM_INFO("DEM resolution is %s, max tile size is %s, will split DEM generation into %s tiles" % ((h, w), max_tile_size, num_tiles))

    tile_bounds_width = ext_width / float(num_splits)
    tile_bounds_height = ext_height / float(num_splits)

    tiles = []

    for r in radiuses:
        minx = extent['minx']

        for x in range(num_splits):
            miny = extent['miny']
            if x == num_splits - 1:
                maxx = extent['maxx']
            else:
                maxx = minx + tile_bounds_width

            for y in range(num_splits):
                if y == num_splits - 1:
                    maxy = extent['maxy']
                else:
                    maxy = miny + tile_bounds_height

                filename = os.path.join(os.path.abspath(outdir), '%s_r%s_x%s_y%s.tif' % (dem_type, r, x, y))

                tiles.append({
                    'radius': r,
                    'bounds': {
                        'minx': minx,
                        'maxx': maxx,
                        'miny': miny,
                        'maxy': maxy 
                    },
                    'filename': filename
                })

                miny = maxy
            minx = maxx

    # Sort tiles by increasing radius
    tiles.sort(key=lambda t: float(t['radius']), reverse=True)

    def process_tile(q):
        log.ODM_INFO("Generating %s (%s, radius: %s, resolution: %s)" % (q['filename'], output_type, q['radius'], resolution))
        
        d = pdal.json_gdal_base(q['filename'], output_type, q['radius'], resolution, q['bounds'])

        if dem_type == 'dtm':
            d = pdal.json_add_classification_filter(d, 2)

        if decimation is not None:
            d = pdal.json_add_decimation_filter(d, decimation)

        pdal.json_add_readers(d, [input_point_cloud])
        pdal.run_pipeline(d, verbose=verbose)

    parallel_map(process_tile, tiles, max_workers)

    output_file = "%s.tif" % dem_type
    output_path = os.path.abspath(os.path.join(outdir, output_file))

    # Verify tile results
    for t in tiles: 
        if not os.path.exists(t['filename']):
            raise Exception("Error creating %s, %s failed to be created" % (output_file, t['filename']))
    
    # Create virtual raster
    tiles_vrt_path = os.path.abspath(os.path.join(outdir, "tiles.vrt"))
    run('gdalbuildvrt "%s" "%s"' % (tiles_vrt_path, '" "'.join(map(lambda t: t['filename'], tiles))))

    merged_vrt_path = os.path.abspath(os.path.join(outdir, "merged.vrt"))
    geotiff_tmp_path = os.path.abspath(os.path.join(outdir, 'tiles.tmp.tif'))
    geotiff_small_path = os.path.abspath(os.path.join(outdir, 'tiles.small.tif'))
    geotiff_small_filled_path = os.path.abspath(os.path.join(outdir, 'tiles.small_filled.tif'))
    geotiff_path = os.path.abspath(os.path.join(outdir, 'tiles.tif'))

    # Build GeoTIFF
    kwargs = {
        'max_memory': get_max_memory(),
        'threads': max_workers if max_workers else 'ALL_CPUS',
        'tiles_vrt': tiles_vrt_path,
        'merged_vrt': merged_vrt_path,
        'geotiff': geotiff_path,
        'geotiff_tmp': geotiff_tmp_path,
        'geotiff_small': geotiff_small_path,
        'geotiff_small_filled': geotiff_small_filled_path
    }

    if gapfill:
        # Sometimes, for some reason gdal_fillnodata.py
        # behaves strangely when reading data directly from a .VRT
        # so we need to convert to GeoTIFF first.
        run('gdal_translate '
                '-co NUM_THREADS={threads} '
                '--config GDAL_CACHEMAX {max_memory}% '
                '{tiles_vrt} {geotiff_tmp}'.format(**kwargs))

        # Scale to 10% size
        run('gdal_translate '
            '-co NUM_THREADS={threads} '
            '--config GDAL_CACHEMAX {max_memory}% '
            '-outsize 10% 0 '
            '{geotiff_tmp} {geotiff_small}'.format(**kwargs))

        # Fill scaled
        run('gdal_fillnodata.py '
            '-co NUM_THREADS={threads} '
            '--config GDAL_CACHEMAX {max_memory}% '
            '-b 1 '
            '-of GTiff '
            '{geotiff_small} {geotiff_small_filled}'.format(**kwargs))

        # Merge filled scaled DEM with unfilled DEM using bilinear interpolation
        run('gdalbuildvrt -resolution highest -r bilinear "%s" "%s" "%s"' % (merged_vrt_path, geotiff_small_filled_path, geotiff_tmp_path))
        run('gdal_translate '
            '-co NUM_THREADS={threads} '
            '-co TILED=YES '
            '-co COMPRESS=DEFLATE '
            '--config GDAL_CACHEMAX {max_memory}% '
            '{merged_vrt} {geotiff}'.format(**kwargs))
    else:
        run('gdal_translate '
                '-co NUM_THREADS={threads} '
                '-co TILED=YES '
                '-co COMPRESS=DEFLATE '
                '--config GDAL_CACHEMAX {max_memory}% '
                '{tiles_vrt} {geotiff}'.format(**kwargs))

    if apply_smoothing:
        median_smoothing(geotiff_path, output_path)
        os.remove(geotiff_path)
    else:
        os.rename(geotiff_path, output_path)

    if os.path.exists(geotiff_tmp_path):
        if not keep_unfilled_copy: 
            os.remove(geotiff_tmp_path)
        else:
            os.rename(geotiff_tmp_path, io.related_file_path(output_path, postfix=".unfilled"))
    
    for cleanup_file in [tiles_vrt_path, merged_vrt_path, geotiff_small_path, geotiff_small_filled_path]:
        if os.path.exists(cleanup_file): os.remove(cleanup_file)
    for t in tiles:
        if os.path.exists(t['filename']): os.remove(t['filename'])
    
    log.ODM_INFO('Completed %s in %s' % (output_file, datetime.now() - start))
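A worked sketch of the tile-split arithmetic in create_dem above, with illustrative numbers:

import math

max_tile_size = 4096
w, h = 20000, 15000                       # hypothetical DEM size in pixels
final_dem_pixels = w * h
num_splits = int(max(1, math.ceil(
    math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size)))
    / math.log(2))))
print(num_splits, num_splits * num_splits)  # 5 splits per axis -> 25 tiles per radius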
Example #18
def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank=16, sample_radius=0, verbose=False, max_concurrency=1):
    """
    Filters a point cloud
    """
    if not os.path.exists(input_point_cloud):
        log.ODM_ERROR("{} does not exist. The program will now exit.".format(input_point_cloud))
        sys.exit(1)

    if (standard_deviation <= 0 or meank <= 0) and sample_radius <= 0:
        log.ODM_INFO("Skipping point cloud filtering")
        # if using the option `--pc-filter 0`, we need to copy input_point_cloud
        shutil.copy(input_point_cloud, output_point_cloud)
        return

    filters = []

    if sample_radius > 0:
        log.ODM_INFO("Sampling points around a %sm radius" % sample_radius)
        filters.append('sample')

    if standard_deviation > 0 and meank > 0:
        log.ODM_INFO("Filtering {} (statistical, meanK {}, standard deviation {})".format(input_point_cloud, meank, standard_deviation))
        filters.append('outlier')

    if len(filters) > 0:
        filters.append('range')

    info = ply_info(input_point_cloud)
    dims = "x=float,y=float,z=float,"
    if info['has_normals']:
        dims += "nx=float,ny=float,nz=float,"
    dims += "red=uchar,blue=uchar,green=uchar"
    if info['has_views']:
        dims += ",views=uchar"

    if info['vertex_count'] == 0:
        log.ODM_ERROR("Cannot read vertex count for {}".format(input_point_cloud))
        sys.exit(1)

    # Do we need to split this?
    VERTEX_THRESHOLD = 250000
    should_split = max_concurrency > 1 and info['vertex_count'] > VERTEX_THRESHOLD*2

    if should_split:
        partsdir = os.path.join(os.path.dirname(output_point_cloud), "parts")
        if os.path.exists(partsdir):
            log.ODM_WARNING("Removing existing directory %s" % partsdir)
            shutil.rmtree(partsdir)

        point_cloud_submodels = split(input_point_cloud, partsdir, "part.ply", capacity=VERTEX_THRESHOLD, dims=dims)

        def run_filter(pcs):
            # Recurse
            filter(pcs['path'], io.related_file_path(pcs['path'], postfix="_filtered"), 
                        standard_deviation=standard_deviation, 
                        meank=meank, 
                        sample_radius=sample_radius, 
                        verbose=verbose,
                        max_concurrency=1)
        # Filter
        parallel_map(run_filter, [{'path': p} for p in point_cloud_submodels], max_concurrency)

        # Merge
        log.ODM_INFO("Merging %s point cloud chunks to %s" % (len(point_cloud_submodels), output_point_cloud))
        filtered_pcs = [io.related_file_path(pcs, postfix="_filtered") for pcs in point_cloud_submodels]
        #merge_ply(filtered_pcs, output_point_cloud, dims)
        fast_merge_ply(filtered_pcs, output_point_cloud)

        if os.path.exists(partsdir):
            shutil.rmtree(partsdir)
    else:
        # Process point cloud (or a point cloud submodel) in a single step
        filterArgs = {
            'inputFile': input_point_cloud,
            'outputFile': output_point_cloud,
            'stages': " ".join(filters),
            'dims': dims
        }

        cmd = ("pdal translate -i \"{inputFile}\" "
                "-o \"{outputFile}\" "
                "{stages} "
                "--writers.ply.sized_types=false "
                "--writers.ply.storage_mode='little endian' "
                "--writers.ply.dims=\"{dims}\" "
                "").format(**filterArgs)

        if 'sample' in filters:
            cmd += "--filters.sample.radius={} ".format(sample_radius)
        
        if 'outlier' in filters:
            cmd += ("--filters.outlier.method='statistical' "
                "--filters.outlier.mean_k={} "
                "--filters.outlier.multiplier={} ").format(meank, standard_deviation)  
        
        if 'range' in filters:
            # Remove outliers
            cmd += "--filters.range.limits='Classification![7:7]' "

        system.run(cmd)

    if not os.path.exists(output_point_cloud):
        log.ODM_WARNING("{} not found, filtering has failed.".format(output_point_cloud))
Example #19
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        dem_input = tree.odm_georeferencing_model_laz
        pc_model_found = io.file_exists(dem_input)
        ignore_resolution = False
        pseudo_georeference = False

        if not reconstruction.is_georeferenced():
            log.ODM_WARNING(
                "Not georeferenced, using ungeoreferenced point cloud...")
            ignore_resolution = True
            pseudo_georeference = True

        # It is probably not reasonable to have accurate DEMs at the same resolution as the source photos, so reduce it
        # by a factor!
        gsd_scaling = 2.0

        resolution = gsd.cap_resolution(args.dem_resolution,
                                        tree.opensfm_reconstruction,
                                        gsd_scaling=gsd_scaling,
                                        ignore_gsd=args.ignore_gsd,
                                        ignore_resolution=ignore_resolution
                                        and args.ignore_gsd,
                                        has_gcp=reconstruction.has_gcp())

        log.ODM_INFO('Classify: ' + str(args.pc_classify))
        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(
            dem_input, str(pc_model_found)))

        # define paths and create working directories
        odm_dem_root = tree.path('odm_dem')
        if not io.dir_exists(odm_dem_root):
            system.mkdir_p(odm_dem_root)

        if args.pc_classify and pc_model_found:
            pc_classify_marker = os.path.join(odm_dem_root,
                                              'pc_classify_done.txt')

            if not io.file_exists(pc_classify_marker) or self.rerun():
                log.ODM_INFO(
                    "Classifying {} using Simple Morphological Filter".format(
                        dem_input))
                commands.classify(dem_input,
                                  args.smrf_scalar,
                                  args.smrf_slope,
                                  args.smrf_threshold,
                                  args.smrf_window,
                                  verbose=args.verbose)

                with open(pc_classify_marker, 'w') as f:
                    f.write('Classify: smrf\n')
                    f.write('Scalar: {}\n'.format(args.smrf_scalar))
                    f.write('Slope: {}\n'.format(args.smrf_slope))
                    f.write('Threshold: {}\n'.format(args.smrf_threshold))
                    f.write('Window: {}\n'.format(args.smrf_window))

        progress = 20
        self.update_progress(progress)

        if args.pc_rectify:
            commands.rectify(dem_input, args.debug)

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and pc_model_found:
            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                self.rerun():

                products = []

                if args.dsm or (args.dtm and args.dem_euclidean_map):
                    products.append('dsm')
                if args.dtm: products.append('dtm')

                radius_steps = [(resolution / 100.0) / 2.0]
                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(
                        radius_steps[-1] *
                        2)  # 2 is arbitrary, maybe there's a better value?

                for product in products:
                    commands.create_dem(
                        dem_input,
                        product,
                        output_type='idw' if product == 'dtm' else 'max',
                        radiuses=list(map(str, radius_steps)),
                        gapfill=args.dem_gapfill_steps > 0,
                        outdir=odm_dem_root,
                        resolution=resolution / 100.0,
                        decimation=args.dem_decimation,
                        verbose=args.verbose,
                        max_workers=args.max_concurrency,
                        keep_unfilled_copy=args.dem_euclidean_map)

                    dem_geotiff_path = os.path.join(odm_dem_root,
                                                    "{}.tif".format(product))
                    bounds_file_path = os.path.join(
                        tree.odm_georeferencing,
                        'odm_georeferenced_model.bounds.gpkg')

                    if args.crop > 0 or args.boundary:
                        # Crop DEM
                        Cropper.crop(
                            bounds_file_path,
                            dem_geotiff_path,
                            utils.get_dem_vars(args),
                            keep_original=not args.optimize_disk_space)

                    if args.dem_euclidean_map:
                        unfilled_dem_path = io.related_file_path(
                            dem_geotiff_path, postfix=".unfilled")

                        if args.crop > 0 or args.boundary:
                            # Crop unfilled DEM
                            Cropper.crop(
                                bounds_file_path,
                                unfilled_dem_path,
                                utils.get_dem_vars(args),
                                keep_original=not args.optimize_disk_space)

                        commands.compute_euclidean_map(
                            unfilled_dem_path,
                            io.related_file_path(dem_geotiff_path,
                                                 postfix=".euclideand"),
                            overwrite=True)

                    if pseudo_georeference:
                        pseudogeo.add_pseudo_georeferencing(dem_geotiff_path)

                    if args.tiles:
                        generate_dem_tiles(dem_geotiff_path,
                                           tree.path("%s_tiles" % product),
                                           args.max_concurrency)

                    if args.cog:
                        convert_to_cogeo(dem_geotiff_path,
                                         max_workers=args.max_concurrency)

                    progress += 30
                    self.update_progress(progress)
            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')
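A worked sketch of the gapfill radius schedule used above: start at half the resolution (converted from cm to meters) and double once per extra gapfill step; values are illustrative.

resolution_cm = 5.0          # hypothetical args.dem_resolution
dem_gapfill_steps = 3        # hypothetical args.dem_gapfill_steps
radius_steps = [(resolution_cm / 100.0) / 2.0]
for _ in range(dem_gapfill_steps - 1):
    radius_steps.append(radius_steps[-1] * 2)   # 2 is arbitrary
print(radius_steps)          # [0.025, 0.05, 0.1]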
Exemple #20
0
    def process(self, args, outputs):
        # get inputs
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']
        photos = reconstruction.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos array to start MVE')
            exit(1)

        # check if reconstruction was done before
        if not io.file_exists(tree.mve_model) or self.rerun():
            # mve makescene wants the output directory
            # not to exist before executing it (otherwise it
            # will prompt the user for confirmation)
            if io.dir_exists(tree.mve):
                shutil.rmtree(tree.mve)

            # run mve makescene
            if not io.dir_exists(tree.mve_views):
                system.run('%s "%s" "%s"' %
                           (context.makescene_path,
                            tree.opensfm_reconstruction_nvm, tree.mve),
                           env_vars={'OMP_NUM_THREADS': args.max_concurrency})

            self.update_progress(10)

            # Compute mve output scale based on depthmap_resolution
            max_pixels = args.depthmap_resolution * args.depthmap_resolution
            if outputs['undist_image_max_size'] * outputs[
                    'undist_image_max_size'] <= max_pixels:
                mve_output_scale = 0
            else:
                ratio = float(
                    outputs['undist_image_max_size'] *
                    outputs['undist_image_max_size']) / float(max_pixels)
                mve_output_scale = int(
                    math.ceil(math.log(ratio) / math.log(4.0)))

            dmrecon_config = [
                "-s%s" % mve_output_scale,
                "--progress=silent",
                "--local-neighbors=2",
                # "--filter-width=3",
            ]

            # Run MVE's dmrecon
            # (An ASCII-art mountain banner is printed here via a long run of
            #  log.ODM_INFO calls; omitted for brevity.)
            log.ODM_INFO('')
            log.ODM_INFO(
                "Running dense reconstruction. This might take a while. Please be patient, the process is not dead or hung."
            )
            log.ODM_INFO("                              Process is running")

            # TODO: find out why MVE is crashing at random
            # MVE *seems* to have a race condition, triggered randomly, regardless of dataset
            # https://gist.github.com/pierotofy/6c9ce93194ba510b61e42e3698cfbb89
            # Temporary workaround is to retry the reconstruction until we get it right
            # (up to a certain number of retries).
            retry_count = 1
            while retry_count < 10:
                try:
                    system.run(
                        '%s %s "%s"' % (context.dmrecon_path,
                                        ' '.join(dmrecon_config), tree.mve),
                        env_vars={'OMP_NUM_THREADS': args.max_concurrency})
                    break
                except Exception as e:
                    if str(e) == "Child returned 134" or str(
                            e) == "Child returned 1":
                        retry_count += 1
                        log.ODM_WARNING(
                            "Caught error code, retrying attempt #%s" %
                            retry_count)
                    else:
                        raise e

            self.update_progress(90)

            scene2pset_config = ["-F%s" % mve_output_scale]

            # run scene2pset
            system.run('%s %s "%s" "%s"' %
                       (context.scene2pset_path, ' '.join(scene2pset_config),
                        tree.mve, tree.mve_model),
                       env_vars={'OMP_NUM_THREADS': args.max_concurrency})

            # run cleanmesh (filter points by MVE confidence threshold)
            if args.mve_confidence > 0:
                mve_filtered_model = io.related_file_path(tree.mve_model,
                                                          postfix=".filtered")
                system.run(
                    '%s -t%s --no-clean --component-size=0 "%s" "%s"' %
                    (context.meshclean_path, min(1.0, args.mve_confidence),
                     tree.mve_model, mve_filtered_model),
                    env_vars={'OMP_NUM_THREADS': args.max_concurrency})

                if io.file_exists(mve_filtered_model):
                    os.remove(tree.mve_model)
                    os.rename(mve_filtered_model, tree.mve_model)
                else:
                    log.ODM_WARNING(
                        "Couldn't filter MVE model (%s does not exist)." %
                        mve_filtered_model)
        else:
            log.ODM_WARNING('Found a valid MVE reconstruction file in: %s' %
                            tree.mve_model)
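A small, self-contained sketch of the depthmap scale computation used above (each MVE scale level halves the image width and height, i.e. a factor of 4 in pixel count, hence the log base 4); the example numbers are hypothetical:

import math

def mve_output_scale(undist_image_max_size, depthmap_resolution):
    max_pixels = depthmap_resolution * depthmap_resolution
    if undist_image_max_size * undist_image_max_size <= max_pixels:
        return 0
    ratio = float(undist_image_max_size * undist_image_max_size) / float(max_pixels)
    return int(math.ceil(math.log(ratio) / math.log(4.0)))

print(mve_output_scale(4000, 640))  # -> 3, i.e. depthmaps computed at 1/8 of the original size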
Exemple #21
0
def euclidean_merge_dems(input_dems, output_dem, creation_options={}):
    """
    Based on https://github.com/mapbox/rio-merge-rgba
    and ideas from Anna Petrasova
    implementation by Piero Toffanin

    Computes a merged DEM by computing/using a euclidean 
    distance to NODATA cells map for all DEMs and then blending all overlapping DEM cells 
    by a weighted average based on such euclidean distance.
    """
    inputs = []
    bounds = None
    precision = 7

    existing_dems = []
    for dem in input_dems:
        if not io.file_exists(dem):
            log.ODM_WARNING("%s does not exist. Will skip from merged DEM." % dem)
            continue
        existing_dems.append(dem)

    if len(existing_dems) == 0:
        log.ODM_WARNING("No input DEMs, skipping euclidean merge.")
        return

    with rasterio.open(existing_dems[0]) as first:
        src_nodata = first.nodatavals[0]
        res = first.res
        dtype = first.dtypes[0]
        profile = first.profile

    for dem in existing_dems:
        eumap = compute_euclidean_map(dem, io.related_file_path(dem, postfix=".euclideand"), overwrite=False)
        if eumap and io.file_exists(eumap):
            inputs.append((dem, eumap))

    log.ODM_INFO("%s valid DEM rasters to merge" % len(inputs))

    sources = [(rasterio.open(d), rasterio.open(e)) for d,e in inputs]

    # Extent from option or extent of all inputs.
    if bounds:
        dst_w, dst_s, dst_e, dst_n = bounds
    else:
        # scan input files.
        # while we're at it, validate assumptions about inputs
        xs = []
        ys = []
        for src_d, src_e in sources:
            if not same_bounds(src_d, src_e):
                raise ValueError("DEM and euclidean file must have the same bounds")

            left, bottom, right, top = src_d.bounds
            xs.extend([left, right])
            ys.extend([bottom, top])
            if src_d.profile["count"] != 1 or src_e.profile["count"] != 1:
                raise ValueError("Inputs must be 1-band rasters")
        dst_w, dst_s, dst_e, dst_n = min(xs), min(ys), max(xs), max(ys)
    log.ODM_INFO("Output bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))

    output_transform = Affine.translation(dst_w, dst_n)
    output_transform *= Affine.scale(res[0], -res[1])

    # Compute output array shape. We guarantee it will cover the output
    # bounds completely.
    output_width = int(math.ceil((dst_e - dst_w) / res[0]))
    output_height = int(math.ceil((dst_n - dst_s) / res[1]))

    # Adjust bounds to fit.
    dst_e, dst_s = output_transform * (output_width, output_height)
    log.ODM_INFO("Output width: %d, height: %d" % (output_width, output_height))
    log.ODM_INFO("Adjusted bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))

    profile["transform"] = output_transform
    profile["height"] = output_height
    profile["width"] = output_width
    profile["tiled"] = creation_options.get('TILED', 'YES') == 'YES'
    profile["blockxsize"] = creation_options.get('BLOCKXSIZE', 512)
    profile["blockysize"] = creation_options.get('BLOCKYSIZE', 512)
    profile["compress"] = creation_options.get('COMPRESS', 'LZW')
    profile["nodata"] = src_nodata

    # Creation opts
    profile.update(creation_options)

    # create destination file
    with rasterio.open(output_dem, "w", **profile) as dstrast:

        for idx, dst_window in dstrast.block_windows():

            left, bottom, right, top = dstrast.window_bounds(dst_window)

            blocksize = dst_window.width
            dst_rows, dst_cols = (dst_window.height, dst_window.width)

            # initialize array destined for the block
            dst_count = first.count
            dst_shape = (dst_count, dst_rows, dst_cols)

            dstarr = np.zeros(dst_shape, dtype=dtype)
            distsum = np.zeros(dst_shape, dtype=dtype)

            for src_d, src_e in sources:
                # The full_cover behavior is problematic here as it includes
                # extra pixels along the bottom right when the sources are
                # slightly misaligned
                #
                # src_window = get_window(left, bottom, right, top,
                #                         src.transform, precision=precision)
                #
                # With rio merge this just adds an extra row, but when the
                # imprecision occurs at each block, you get artifacts

                nodata = src_d.nodatavals[0]

                # Alternative, custom get_window using rounding
                src_window = tuple(zip(rowcol(
                        src_d.transform, left, top, op=round, precision=precision
                    ), rowcol(
                        src_d.transform, right, bottom, op=round, precision=precision
                    )))

                temp_d = np.zeros(dst_shape, dtype=dtype)
                temp_d = src_d.read(
                    out=temp_d, window=src_window, boundless=True, masked=False
                )

                temp_e = np.zeros(dst_shape, dtype=dtype)
                temp_e = src_e.read(
                    out=temp_e, window=src_window, boundless=True, masked=False
                )

                # Set NODATA areas in the euclidean map to a very low value
                # so that:
                #  - Areas with overlap prioritize DEM layers' cells that 
                #    are far away from NODATA areas
                #  - Areas that have no overlap are included in the final result
                #    even if they are very close to a NODATA cell
                temp_e[temp_e==0] = 0.001953125
                temp_e[temp_d==nodata] = 0

                np.multiply(temp_d, temp_e, out=temp_d)
                np.add(dstarr, temp_d, out=dstarr)
                np.add(distsum, temp_e, out=distsum)

            np.divide(dstarr, distsum, out=dstarr, where=distsum[0] != 0.0)
            dstarr[dstarr == 0.0] = src_nodata

            dstrast.write(dstarr, window=dst_window)

    return output_dem
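The per-block weighting above reduces to a distance-weighted average. A tiny sketch of what happens to a single cell covered by two DEMs (values are hypothetical):

import numpy as np

values    = np.array([100.0, 102.0])  # the same cell as seen by two overlapping DEMs
distances = np.array([4.0, 1.0])      # euclidean distance of that cell to the nearest NODATA cell
blended = (values * distances).sum() / distances.sum()
print(blended)  # 100.4 -> the DEM whose cell is far from NODATA dominates the result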
Exemple #22
0
def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True,
                outdir='', resolution=0.1, max_workers=1, max_tile_size=2048,
                verbose=False, decimation=None, keep_unfilled_copy=False):
    """ Create DEM from multiple radii, and optionally gapfill """
    global error
    error = None

    start = datetime.now()

    if not os.path.exists(outdir):
        log.ODM_INFO("Creating %s" % outdir)
        os.mkdir(outdir)

    extent = point_cloud.get_extent(input_point_cloud)
    log.ODM_INFO("Point cloud bounds are [minx: %s, maxx: %s] [miny: %s, maxy: %s]" % (extent['minx'], extent['maxx'], extent['miny'], extent['maxy']))
    ext_width = extent['maxx'] - extent['minx']
    ext_height = extent['maxy'] - extent['miny']

    final_dem_resolution = (int(math.ceil(ext_width / float(resolution))),
                            int(math.ceil(ext_height / float(resolution))))
    final_dem_pixels = final_dem_resolution[0] * final_dem_resolution[1]

    num_splits = int(max(1, math.ceil(math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size)))/math.log(2))))
    num_tiles = num_splits * num_splits
    log.ODM_INFO("DEM resolution is %s, max tile size is %s, will split DEM generation into %s tiles" % (final_dem_resolution, max_tile_size, num_tiles))

    tile_bounds_width = ext_width / float(num_splits)
    tile_bounds_height = ext_height / float(num_splits)

    tiles = []

    for r in radiuses:
        minx = extent['minx']

        for x in range(num_splits):
            miny = extent['miny']
            if x == num_splits - 1:
                maxx = extent['maxx']
            else:
                maxx = minx + tile_bounds_width

            for y in range(num_splits):
                if y == num_splits - 1:
                    maxy = extent['maxy']
                else:
                    maxy = miny + tile_bounds_height

                filename = os.path.join(os.path.abspath(outdir), '%s_r%s_x%s_y%s.tif' % (dem_type, r, x, y))

                tiles.append({
                    'radius': r,
                    'bounds': {
                        'minx': minx,
                        'maxx': maxx,
                        'miny': miny,
                        'maxy': maxy 
                    },
                    'filename': filename
                })

                miny = maxy
            minx = maxx

    # Sort tiles by decreasing radius (largest radius first)
    tiles.sort(key=lambda t: float(t['radius']), reverse=True)

    def process_one(q):
        log.ODM_INFO("Generating %s (%s, radius: %s, resolution: %s)" % (q['filename'], output_type, q['radius'], resolution))
        
        d = pdal.json_gdal_base(q['filename'], output_type, q['radius'], resolution, q['bounds'])

        if dem_type == 'dsm':
            d = pdal.json_add_classification_filter(d, 2, equality='max')
        elif dem_type == 'dtm':
            d = pdal.json_add_classification_filter(d, 2)

        if decimation is not None:
            d = pdal.json_add_decimation_filter(d, decimation)

        pdal.json_add_readers(d, [input_point_cloud])
        pdal.run_pipeline(d, verbose=verbose)

    def worker():
        global error

        while True:
            (num, q) = pq.get()
            if q is None or error is not None:
                pq.task_done()
                break

            try:
                process_one(q)
            except Exception as e:
                error = e
            finally:
                pq.task_done()

    if max_workers > 1:
        use_single_thread = False
        pq = queue.PriorityQueue()
        threads = []
        for i in range(max_workers):
            t = threading.Thread(target=worker)
            t.start()
            threads.append(t)

        for i, t in enumerate(tiles):
            # Give each tile a distinct priority so the PriorityQueue never has to
            # compare the dict payloads (which would raise TypeError on Python 3)
            pq.put((i, t.copy()))

        def stop_workers():
            for i in range(len(threads)):
                pq.put((-1, None))
            for t in threads:
                t.join()

        # block until all tasks are done
        try:
            while pq.unfinished_tasks > 0:
                time.sleep(0.5)
        except KeyboardInterrupt:
            print("CTRL+C terminating...")
            stop_workers()
            sys.exit(1)

        stop_workers()

        if error is not None:
            # Try to reprocess using a single thread
            # in case this was a memory error
            log.ODM_WARNING("DEM processing failed with multiple threads, let's retry with a single thread...")
            use_single_thread = True
    else:
        use_single_thread = True

    if use_single_thread:
        # Boring, single thread processing
        for q in tiles:
            process_one(q)

    output_file = "%s.tif" % dem_type
    output_path = os.path.abspath(os.path.join(outdir, output_file))

    # Verify tile results
    for t in tiles: 
        if not os.path.exists(t['filename']):
            raise Exception("Error creating %s, %s failed to be created" % (output_file, t['filename']))
    
    # Create virtual raster
    vrt_path = os.path.abspath(os.path.join(outdir, "merged.vrt"))
    run('gdalbuildvrt "%s" "%s"' % (vrt_path, '" "'.join(map(lambda t: t['filename'], tiles))))

    geotiff_tmp_path = os.path.abspath(os.path.join(outdir, 'merged.tmp.tif'))
    geotiff_path = os.path.abspath(os.path.join(outdir, 'merged.tif'))

    # Build GeoTIFF
    kwargs = {
        'max_memory': get_max_memory(),
        'threads': max_workers if max_workers else 'ALL_CPUS',
        'vrt': vrt_path,
        'geotiff': geotiff_path,
        'geotiff_tmp': geotiff_tmp_path
    }

    if gapfill:
        # Sometimes, for some reason gdal_fillnodata.py
        # behaves strangely when reading data directly from a .VRT
        # so we need to convert to GeoTIFF first.
        run('gdal_translate '
                '-co NUM_THREADS={threads} '
                '--config GDAL_CACHEMAX {max_memory}% '
                '{vrt} {geotiff_tmp}'.format(**kwargs))

        run('gdal_fillnodata.py '
            '-co NUM_THREADS={threads} '
            '--config GDAL_CACHEMAX {max_memory}% '
            '-b 1 '
            '-of GTiff '
            '{geotiff_tmp} {geotiff}'.format(**kwargs))
    else:
        run('gdal_translate '
                '-co NUM_THREADS={threads} '
                '--config GDAL_CACHEMAX {max_memory}% '
                '{vrt} {geotiff}'.format(**kwargs))
        

    post_process(geotiff_path, output_path)
    os.remove(geotiff_path)

    if os.path.exists(geotiff_tmp_path):
        if not keep_unfilled_copy: 
            os.remove(geotiff_tmp_path)
        else:
            os.rename(geotiff_tmp_path, io.related_file_path(output_path, postfix=".unfilled"))
            
    if os.path.exists(vrt_path): os.remove(vrt_path)
    for t in tiles:
        if os.path.exists(t['filename']): os.remove(t['filename'])
    
    log.ODM_INFO('Completed %s in %s' % (output_file, datetime.now() - start))
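As a rough guide to the tiling above, this sketch reproduces the num_splits formula for a hypothetical DEM size:

import math

def num_dem_splits(final_dem_pixels, max_tile_size=2048):
    return int(max(1, math.ceil(
        math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size))) / math.log(2))))

splits = num_dem_splits(10000 * 10000)  # a 10000 x 10000 pixel DEM
print(splits, splits * splits)          # 5 splits per axis -> 25 tiles per radius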
Exemple #23
0
    def process(self, args, outputs):
        cm = outputs["cm"]

        georeconstruction_dir = outputs["georeconstruction_dir"]
        
        # Create dense dir
        outputs["dense_dir"] = os.path.join(outputs["project_path"], "dense")
        if not os.path.exists(outputs["dense_dir"]):
            system.mkdir_p(outputs["dense_dir"])

        if not has_gpus() or args.use_mve_dense:
            output_type = "PMVS"
            outputs["dense_workspace_dir"] = os.path.join(outputs["dense_dir"], "pmvs")
            already_run_undistortion = os.path.exists(outputs["dense_workspace_dir"])
        else:
            output_type = "COLMAP"
            outputs["dense_workspace_dir"] = outputs["dense_dir"]
            already_run_undistortion = os.path.exists(os.path.join(outputs["dense_dir"], "images"))
            
        if not already_run_undistortion or self.rerun():
            log.ODM_INFO("Undistorting images using a %s workspace" % output_type.lower())

            # Undistort images
            cm.run("image_undistorter", image_path=outputs["images_dir"],
                                        input_path=georeconstruction_dir,
                                        output_path=outputs["dense_dir"],
                                        output_type=output_type)

        if output_type == "COLMAP":
            outputs["point_cloud_ply_file"] = os.path.join(outputs["dense_workspace_dir"], "fused.ply")
            outputs["undistorted_dir"] = os.path.join(outputs["dense_workspace_dir"], "images")
        else:
            outputs["dense_mve_dir"] = os.path.join(outputs["dense_workspace_dir"], "mve")
            outputs["point_cloud_ply_file"] = os.path.join(outputs["dense_mve_dir"], "mve_dense_point_cloud.ply")
            outputs["undistorted_dir"] = os.path.join(outputs["dense_workspace_dir"], "bundler")

        if not os.path.exists(outputs["point_cloud_ply_file"]) or self.rerun():
            if output_type == "COLMAP":
                # Use COLMAP, easy
                kwargs = {
                    'PatchMatchStereo.geom_consistency': 'true'
                }

                cm.run("patch_match_stereo", workspace_path=outputs["dense_workspace_dir"],
                                             workspace_format="COLMAP",
                                             **kwargs)

                kwargs = {}
                
                cm.run("stereo_fusion", workspace_path=outputs["dense_workspace_dir"],
                                        workspace_format="COLMAP",
                                        input_type="geometric",
                                        output_path=outputs["point_cloud_ply_file"],
                                        **kwargs)
            else:
                # Use MVE

                # Create directory structure so makescene is happy...
                if os.path.exists(outputs["dense_mve_dir"]) and self.rerun():
                    log.ODM_WARNING("Removing %s" % outputs["dense_mve_dir"])
                    shutil.rmtree(outputs["dense_mve_dir"])

                bundler_dir = os.path.join(outputs["dense_workspace_dir"], "bundler")
                bundle_dir = os.path.join(bundler_dir, "bundle")
                if os.path.exists(outputs["dense_mve_dir"]) and self.rerun():
                    log.ODM_WARNING("Removing %s" % bundle_dir)
                    shutil.rmtree(bundle_dir)
                
                # Create dense/pmvs/bundle
                system.mkdir_p(bundle_dir)

                bundle_rd_out_file = os.path.join(outputs["dense_workspace_dir"], "bundle.rd.out")
                bundle_image_list = os.path.join(outputs["dense_workspace_dir"], "bundle.rd.out.list.txt")

                # Copy bundle.rd.out --> bundler/bundle/bundle.out
                shutil.copy(bundle_rd_out_file, os.path.join(bundle_dir, "bundle.out"))

                # Read image list
                with open(bundle_image_list, "r") as f:
                    # list() so that images[i] below works on Python 3
                    images = list(filter(len, map(str.strip, f.read().split("\n"))))
                
                visualize = os.listdir(os.path.join(outputs["dense_workspace_dir"], "visualize"))
                visualize.sort()
                visualize = [os.path.join(outputs["dense_workspace_dir"], "visualize", v) for v in visualize]

                # Copy each image from visualize/########N{8}.jpg to bundle/images[N]
                # TODO: check tiff extensions?
                for i, src in enumerate(visualize):
                    dst = os.path.join(bundler_dir, images[i])
                    log.ODM_INFO("Copying %s --> %s" % (os.path.basename(src), os.path.basename(dst)))

                    # Could make it faster by moving, but then we mess up the structure...
                    shutil.copy(src, dst)
                
                # Copy image list (bundle.rd.out.list.txt --> bundler/list.txt)
                shutil.copy(bundle_image_list, os.path.join(bundler_dir, "list.txt"))

                # Run makescene
                if os.path.exists(outputs["dense_mve_dir"]):
                    log.ODM_WARNING("Removing %s" % outputs["dense_mve_dir"])
                    shutil.rmtree(outputs["dense_mve_dir"])

                system.run("makescene \"{}\" \"{}\"".format(bundler_dir, outputs["dense_mve_dir"]))

                # Read image dimension
                # TODO: this can be improved, see below
                width, height = get_image_size(os.path.join(bundler_dir, images[0]))
                log.ODM_INFO("Image dimensions: (%s, %s)" % (width, height))
                size = max(width, height)

                max_pixels = args.depthmap_resolution * args.depthmap_resolution
                if size * size <= max_pixels:
                    mve_output_scale = 0
                else:
                    ratio = float(size* size) / float(max_pixels)
                    mve_output_scale = int(math.ceil(math.log(ratio) / math.log(4.0)))

                # TODO: we don't have a limit on undistortion dimensions
                # Compute mve output scale based on depthmap_resolution
                #max_pixels = args.depthmap_resolution * args.depthmap_resolution
                # if outputs['undist_image_max_size'] * outputs['undist_image_max_size'] <= max_pixels:
                #     mve_output_scale = 0
                # else:
                #     ratio = float(outputs['undist_image_max_size'] * outputs['undist_image_max_size']) / float(max_pixels)
                #     mve_output_scale = int(math.ceil(math.log(ratio) / math.log(4.0)))

                dmrecon_config = [
                    "-s%s" % mve_output_scale,
                    "--progress=fancy",
                    "--local-neighbors=2",
                ]

                # Run MVE's dmrecon
                log.ODM_INFO("Running dense reconstruction. This might take a while.")
            
                # TODO: find out why MVE is crashing at random
                # MVE *seems* to have a race condition, triggered randomly, regardless of dataset
                # https://gist.github.com/pierotofy/6c9ce93194ba510b61e42e3698cfbb89
                # Temporary workaround is to retry the reconstruction until we get it right
                # (up to a certain number of retries).
                retry_count = 1
                while retry_count < 10:
                    try:
                        system.run('dmrecon %s "%s"' % (' '.join(dmrecon_config), outputs["dense_mve_dir"]))
                        break
                    except Exception as e:
                        if str(e) == "Child returned 134" or str(e) == "Child returned 1":
                            retry_count += 1
                            log.ODM_WARNING("Caught error code, retrying attempt #%s" % retry_count)
                        else:
                            raise e

                scene2pset_config = [
                    "-F%s" % mve_output_scale
                ]

                system.run('scene2pset %s "%s" "%s"' % (' '.join(scene2pset_config), outputs["dense_mve_dir"], outputs["point_cloud_ply_file"]))
        
                # run cleanmesh (filter points by MVE confidence threshold)
                if args.mve_confidence > 0:
                    mve_filtered_model = io.related_file_path(outputs["point_cloud_ply_file"], postfix=".filtered")
                    system.run('meshclean -t%s --no-clean --component-size=0 "%s" "%s"' % (min(1.0, args.mve_confidence), outputs["point_cloud_ply_file"], mve_filtered_model))

                    if io.file_exists(mve_filtered_model):
                        os.remove(outputs["point_cloud_ply_file"])
                        os.rename(mve_filtered_model, outputs["point_cloud_ply_file"])
                    else:
                        log.ODM_WARNING("Couldn't filter MVE model (%s does not exist)." % mve_filtered_model)
        else:
            log.ODM_WARNING('Found existing dense model in: %s' % outputs["point_cloud_ply_file"])
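For readers unfamiliar with the cm.run wrapper, the COLMAP branch above corresponds roughly to the standalone COLMAP CLI calls below (the paths are hypothetical and the wrapper is assumed to forward keyword arguments as --key value flags):

import subprocess

dense_dir = "/project/dense"  # hypothetical workspace produced by image_undistorter

subprocess.run(["colmap", "patch_match_stereo",
                "--workspace_path", dense_dir,
                "--workspace_format", "COLMAP",
                "--PatchMatchStereo.geom_consistency", "true"], check=True)

subprocess.run(["colmap", "stereo_fusion",
                "--workspace_path", dense_dir,
                "--workspace_format", "COLMAP",
                "--input_type", "geometric",
                "--output_path", dense_dir + "/fused.ply"], check=True)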
Exemple #24
0
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        dem_input = tree.odm_georeferencing_model_laz
        pc_model_found = io.file_exists(dem_input)
        ignore_resolution = False
        pseudo_georeference = False

        if not reconstruction.is_georeferenced():
            # Special case to clear previous run point cloud
            # (NodeODM will generate a fake georeferenced laz during postprocessing
            # with non-georeferenced datasets). odm_georeferencing_model_laz should
            # not be here! Perhaps we should improve this.
            if io.file_exists(
                    tree.odm_georeferencing_model_laz) and self.rerun():
                os.remove(tree.odm_georeferencing_model_laz)

            log.ODM_WARNING(
                "Not georeferenced, using ungeoreferenced point cloud...")
            dem_input = tree.path("odm_filterpoints", "point_cloud.ply")
            pc_model_found = io.file_exists(dem_input)
            ignore_resolution = True
            pseudo_georeference = True

        resolution = gsd.cap_resolution(args.dem_resolution,
                                        tree.opensfm_reconstruction,
                                        gsd_error_estimate=-3,
                                        ignore_gsd=args.ignore_gsd,
                                        ignore_resolution=ignore_resolution,
                                        has_gcp=reconstruction.has_gcp())

        log.ODM_INFO('Classify: ' + str(args.pc_classify))
        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(
            dem_input, str(pc_model_found)))

        # define paths and create working directories
        odm_dem_root = tree.path('odm_dem')
        if not io.dir_exists(odm_dem_root):
            system.mkdir_p(odm_dem_root)

        if args.pc_classify and pc_model_found:
            pc_classify_marker = os.path.join(odm_dem_root,
                                              'pc_classify_done.txt')

            if not io.file_exists(pc_classify_marker) or self.rerun():
                log.ODM_INFO(
                    "Classifying {} using Simple Morphological Filter".format(
                        dem_input))
                commands.classify(dem_input,
                                  args.smrf_scalar,
                                  args.smrf_slope,
                                  args.smrf_threshold,
                                  args.smrf_window,
                                  verbose=args.verbose)

                with open(pc_classify_marker, 'w') as f:
                    f.write('Classify: smrf\n')
                    f.write('Scalar: {}\n'.format(args.smrf_scalar))
                    f.write('Slope: {}\n'.format(args.smrf_slope))
                    f.write('Threshold: {}\n'.format(args.smrf_threshold))
                    f.write('Window: {}\n'.format(args.smrf_window))

        progress = 20
        self.update_progress(progress)

        if args.pc_rectify:
            commands.rectify(dem_input, args.debug)

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and pc_model_found:
            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                self.rerun():

                products = []

                if args.dsm or (args.dtm and args.dem_euclidean_map):
                    products.append('dsm')
                if args.dtm: products.append('dtm')

                radius_steps = [(resolution / 100.0) / 2.0]
                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(
                        radius_steps[-1] *
                        2)  # 2 is arbitrary, maybe there's a better value?

                for product in products:
                    commands.create_dem(
                        dem_input,
                        product,
                        output_type='idw' if product == 'dtm' else 'max',
                        radiuses=map(str, radius_steps),
                        gapfill=args.dem_gapfill_steps > 0,
                        outdir=odm_dem_root,
                        resolution=resolution / 100.0,
                        decimation=args.dem_decimation,
                        verbose=args.verbose,
                        max_workers=args.max_concurrency,
                        keep_unfilled_copy=args.dem_euclidean_map)

                    dem_geotiff_path = os.path.join(odm_dem_root,
                                                    "{}.tif".format(product))
                    bounds_file_path = os.path.join(
                        tree.odm_georeferencing,
                        'odm_georeferenced_model.bounds.gpkg')

                    if args.crop > 0:
                        # Crop DEM
                        Cropper.crop(bounds_file_path, dem_geotiff_path,
                                     utils.get_dem_vars(args))

                    if args.dem_euclidean_map:
                        unfilled_dem_path = io.related_file_path(
                            dem_geotiff_path, postfix=".unfilled")

                        if args.crop > 0:
                            # Crop unfilled DEM
                            Cropper.crop(bounds_file_path, unfilled_dem_path,
                                         utils.get_dem_vars(args))

                        commands.compute_euclidean_map(
                            unfilled_dem_path,
                            io.related_file_path(dem_geotiff_path,
                                                 postfix=".euclideand"),
                            overwrite=True)

                    if pseudo_georeference:
                        # 0.1 is arbitrary
                        pseudogeo.add_pseudo_georeferencing(
                            dem_geotiff_path, 0.1)

                    progress += 30
                    self.update_progress(progress)
            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')
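For orientation, the per-product outputs referenced above end up side by side in odm_dem (a hypothetical layout, assuming io.related_file_path inserts the postfix just before the file extension, as the ".unfilled" rename in create_dem suggests):

# /project/odm_dem/dsm.tif             <- gap-filled DEM (cropped when --crop > 0)
# /project/odm_dem/dsm.unfilled.tif    <- kept only when --dem-euclidean-map is set
# /project/odm_dem/dsm.euclideand.tif  <- distance-to-NODATA map computed from the unfilled DEM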