Example #1
    def process(self, args, outputs):
        tree = outputs['tree']
        las_model_found = io.file_exists(tree.odm_georeferencing_model_laz)

        log.ODM_INFO('Classify: ' + str(args.pc_classify))
        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(tree.odm_georeferencing_model_laz, str(las_model_found)))

        # define paths and create working directories
        odm_dem_root = tree.path('odm_dem')
        if not io.dir_exists(odm_dem_root):
            system.mkdir_p(odm_dem_root)

        if args.pc_classify and las_model_found:
            pc_classify_marker = os.path.join(odm_dem_root, 'pc_classify_done.txt')

            if not io.file_exists(pc_classify_marker) or self.rerun():
                log.ODM_INFO("Classifying {} using Simple Morphological Filter".format(tree.odm_georeferencing_model_laz))
                commands.classify(tree.odm_georeferencing_model_laz,
                                  args.smrf_scalar, 
                                  args.smrf_slope, 
                                  args.smrf_threshold, 
                                  args.smrf_window,
                                  verbose=args.verbose
                                )

                with open(pc_classify_marker, 'w') as f:
                    f.write('Classify: smrf\n')
                    f.write('Scalar: {}\n'.format(args.smrf_scalar))
                    f.write('Slope: {}\n'.format(args.smrf_slope))
                    f.write('Threshold: {}\n'.format(args.smrf_threshold))
                    f.write('Window: {}\n'.format(args.smrf_window))
            
        progress = 20
        self.update_progress(progress)

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and las_model_found:
            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                self.rerun():

                products = []
                if args.dsm: products.append('dsm')
                if args.dtm: products.append('dtm')
                
                resolution = gsd.cap_resolution(args.dem_resolution, tree.opensfm_reconstruction, gsd_error_estimate=-3, ignore_gsd=args.ignore_gsd)
                radius_steps = [(resolution / 100.0) / 2.0]
                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary, maybe there's a better value?

                for product in products:
                    commands.create_dem(
                            tree.odm_georeferencing_model_laz,
                            product,
                            output_type='idw' if product == 'dtm' else 'max',
                            radiuses=map(str, radius_steps),
                            gapfill=args.dem_gapfill_steps > 0,
                            outdir=odm_dem_root,
                            resolution=resolution / 100.0,
                            decimation=args.dem_decimation,
                            verbose=args.verbose,
                            max_workers=args.max_concurrency,
                            keep_unfilled_copy=args.dem_euclidean_map
                        )

                    dem_geotiff_path = os.path.join(odm_dem_root, "{}.tif".format(product))
                    bounds_file_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')

                    if args.crop > 0:
                        # Crop DEM
                        Cropper.crop(bounds_file_path, dem_geotiff_path, utils.get_dem_vars(args))

                    if args.dem_euclidean_map:
                        unfilled_dem_path = io.related_file_path(dem_geotiff_path, postfix=".unfilled")
                        
                        if args.crop > 0:
                            # Crop unfilled DEM
                            Cropper.crop(bounds_file_path, unfilled_dem_path, utils.get_dem_vars(args))

                        commands.compute_euclidean_map(unfilled_dem_path, 
                                            io.related_file_path(dem_geotiff_path, postfix=".euclideand"), 
                                            overwrite=True)
                    
                    progress += 30
                    self.update_progress(progress)
            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')
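
As a quick sanity check of the radius_steps progression above, here is a standalone sketch with illustrative values (a capped resolution of 5 cm and dem_gapfill_steps=3 are assumptions, not values from a real run):

    resolution = 5.0                                 # cm, hypothetical output of gsd.cap_resolution
    dem_gapfill_steps = 3                            # hypothetical value of args.dem_gapfill_steps
    radius_steps = [(resolution / 100.0) / 2.0]      # 0.025 m: half a pixel at the target resolution
    for _ in range(dem_gapfill_steps - 1):
        radius_steps.append(radius_steps[-1] * 2)    # each gap-fill pass doubles the search radius
    print(radius_steps)                              # [0.025, 0.05, 0.1] (meters)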
Example #2
def euclidean_merge_dems(input_dems, output_dem, creation_options={}):
    """
    Based on https://github.com/mapbox/rio-merge-rgba
    and ideas from Anna Petrasova
    implementation by Piero Toffanin

    Computes a merged DEM by computing/using a euclidean 
    distance to NODATA cells map for all DEMs and then blending all overlapping DEM cells 
    by a weighted average based on such euclidean distance.
    """
    inputs = []
    bounds = None
    precision = 7

    existing_dems = []
    for dem in input_dems:
        if not io.file_exists(dem):
            log.ODM_WARNING("%s does not exist. Will skip from merged DEM." % dem)
            continue
        existing_dems.append(dem)

    if len(existing_dems) == 0:
        log.ODM_WARNING("No input DEMs, skipping euclidean merge.")
        return

    with rasterio.open(existing_dems[0]) as first:
        src_nodata = first.nodatavals[0]
        res = first.res
        dtype = first.dtypes[0]
        profile = first.profile

    for dem in existing_dems:
        eumap = compute_euclidean_map(dem, io.related_file_path(dem, postfix=".euclideand"), overwrite=False)
        if eumap and io.file_exists(eumap):
            inputs.append((dem, eumap))

    log.ODM_INFO("%s valid DEM rasters to merge" % len(inputs))

    sources = [(rasterio.open(d), rasterio.open(e)) for d,e in inputs]

    # Extent from option or extent of all inputs.
    if bounds:
        dst_w, dst_s, dst_e, dst_n = bounds
    else:
        # scan input files.
        # while we're at it, validate assumptions about inputs
        xs = []
        ys = []
        for src_d, src_e in sources:
            if not same_bounds(src_d, src_e):
                raise ValueError("DEM and euclidean file must have the same bounds")

            left, bottom, right, top = src_d.bounds
            xs.extend([left, right])
            ys.extend([bottom, top])
            if src_d.profile["count"] != 1 or src_e.profile["count"] != 1:
                raise ValueError("Inputs must be 1-band rasters")
        dst_w, dst_s, dst_e, dst_n = min(xs), min(ys), max(xs), max(ys)
    log.ODM_INFO("Output bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))

    output_transform = Affine.translation(dst_w, dst_n)
    output_transform *= Affine.scale(res[0], -res[1])

    # Compute output array shape. We guarantee it will cover the output
    # bounds completely.
    output_width = int(math.ceil((dst_e - dst_w) / res[0]))
    output_height = int(math.ceil((dst_n - dst_s) / res[1]))

    # Adjust bounds to fit.
    dst_e, dst_s = output_transform * (output_width, output_height)
    log.ODM_INFO("Output width: %d, height: %d" % (output_width, output_height))
    log.ODM_INFO("Adjusted bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))

    profile["transform"] = output_transform
    profile["height"] = output_height
    profile["width"] = output_width
    profile["tiled"] = creation_options.get('TILED', 'YES') == 'YES'
    profile["blockxsize"] = creation_options.get('BLOCKXSIZE', 512)
    profile["blockysize"] = creation_options.get('BLOCKYSIZE', 512)
    profile["compress"] = creation_options.get('COMPRESS', 'LZW')
    profile["nodata"] = src_nodata

    # Creation opts
    profile.update(creation_options)

    # create destination file
    with rasterio.open(output_dem, "w", **profile) as dstrast:

        for idx, dst_window in dstrast.block_windows():

            left, bottom, right, top = dstrast.window_bounds(dst_window)

            blocksize = dst_window.width
            dst_rows, dst_cols = (dst_window.height, dst_window.width)

            # initialize array destined for the block
            dst_count = first.count
            dst_shape = (dst_count, dst_rows, dst_cols)

            dstarr = np.zeros(dst_shape, dtype=dtype)
            distsum = np.zeros(dst_shape, dtype=dtype)

            for src_d, src_e in sources:
                # The full_cover behavior is problematic here as it includes
                # extra pixels along the bottom right when the sources are
                # slightly misaligned
                #
                # src_window = get_window(left, bottom, right, top,
                #                         src.transform, precision=precision)
                #
                # With rio merge this just adds an extra row, but when the
                # imprecision occurs at each block, you get artifacts

                nodata = src_d.nodatavals[0]

                # Alternative, custom get_window using rounding
                src_window = tuple(zip(rowcol(
                        src_d.transform, left, top, op=round, precision=precision
                    ), rowcol(
                        src_d.transform, right, bottom, op=round, precision=precision
                    )))

                temp_d = np.zeros(dst_shape, dtype=dtype)
                temp_d = src_d.read(
                    out=temp_d, window=src_window, boundless=True, masked=False
                )

                temp_e = np.zeros(dst_shape, dtype=dtype)
                temp_e = src_e.read(
                    out=temp_e, window=src_window, boundless=True, masked=False
                )

                # Set NODATA areas in the euclidean map to a very low value
                # so that:
                #  - Areas with overlap prioritize DEM layers' cells that 
                #    are far away from NODATA areas
                #  - Areas that have no overlap are included in the final result
                #    even if they are very close to a NODATA cell
                temp_e[temp_e==0] = 0.001953125
                temp_e[temp_d==nodata] = 0

                np.multiply(temp_d, temp_e, out=temp_d)
                np.add(dstarr, temp_d, out=dstarr)
                np.add(distsum, temp_e, out=distsum)

            np.divide(dstarr, distsum, out=dstarr, where=distsum[0] != 0.0)
            dstarr[dstarr == 0.0] = src_nodata

            dstrast.write(dstarr, window=dst_window)

    return output_dem
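
A minimal usage sketch (the file names are hypothetical; in ODM this function is driven by the split-merge pipeline rather than called by hand):

    euclidean_merge_dems(
        ["dsm_tile_1.tif", "dsm_tile_2.tif"],        # per-tile DEMs; missing files are skipped with a warning
        "dsm_merged.tif",                            # blended output raster
        creation_options={"COMPRESS": "DEFLATE"}     # merged into the output profile via profile.update(...)
    )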
Example #3
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        dem_input = tree.odm_georeferencing_model_laz
        pc_model_found = io.file_exists(dem_input)
        ignore_resolution = False
        pseudo_georeference = False

        if not reconstruction.is_georeferenced():
            # Special case to clear previous run point cloud
            # (NodeODM will generate a fake georeferenced laz during postprocessing
            # with non-georeferenced datasets). odm_georeferencing_model_laz should
            # not be here! Perhaps we should improve this.
            if io.file_exists(
                    tree.odm_georeferencing_model_laz) and self.rerun():
                os.remove(tree.odm_georeferencing_model_laz)

            log.ODM_WARNING(
                "Not georeferenced, using ungeoreferenced point cloud...")
            dem_input = tree.path("odm_filterpoints", "point_cloud.ply")
            pc_model_found = io.file_exists(dem_input)
            ignore_resolution = True
            pseudo_georeference = True

        resolution = gsd.cap_resolution(args.dem_resolution,
                                        tree.opensfm_reconstruction,
                                        gsd_error_estimate=-3,
                                        ignore_gsd=args.ignore_gsd,
                                        ignore_resolution=ignore_resolution,
                                        has_gcp=reconstruction.has_gcp())

        log.ODM_INFO('Classify: ' + str(args.pc_classify))
        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(
            dem_input, str(pc_model_found)))

        # define paths and create working directories
        odm_dem_root = tree.path('odm_dem')
        if not io.dir_exists(odm_dem_root):
            system.mkdir_p(odm_dem_root)

        if args.pc_classify and pc_model_found:
            pc_classify_marker = os.path.join(odm_dem_root,
                                              'pc_classify_done.txt')

            if not io.file_exists(pc_classify_marker) or self.rerun():
                log.ODM_INFO(
                    "Classifying {} using Simple Morphological Filter".format(
                        dem_input))
                commands.classify(dem_input,
                                  args.smrf_scalar,
                                  args.smrf_slope,
                                  args.smrf_threshold,
                                  args.smrf_window,
                                  verbose=args.verbose)

                with open(pc_classify_marker, 'w') as f:
                    f.write('Classify: smrf\n')
                    f.write('Scalar: {}\n'.format(args.smrf_scalar))
                    f.write('Slope: {}\n'.format(args.smrf_slope))
                    f.write('Threshold: {}\n'.format(args.smrf_threshold))
                    f.write('Window: {}\n'.format(args.smrf_window))

        progress = 20
        self.update_progress(progress)

        if args.pc_rectify:
            commands.rectify(dem_input, args.debug)

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and pc_model_found:
            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                self.rerun():

                products = []

                if args.dsm or (args.dtm and args.dem_euclidean_map):
                    products.append('dsm')
                if args.dtm: products.append('dtm')

                radius_steps = [(resolution / 100.0) / 2.0]
                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(
                        radius_steps[-1] *
                        2)  # 2 is arbitrary, maybe there's a better value?

                for product in products:
                    commands.create_dem(
                        dem_input,
                        product,
                        output_type='idw' if product == 'dtm' else 'max',
                        radiuses=map(str, radius_steps),
                        gapfill=args.dem_gapfill_steps > 0,
                        outdir=odm_dem_root,
                        resolution=resolution / 100.0,
                        decimation=args.dem_decimation,
                        verbose=args.verbose,
                        max_workers=args.max_concurrency,
                        keep_unfilled_copy=args.dem_euclidean_map)

                    dem_geotiff_path = os.path.join(odm_dem_root,
                                                    "{}.tif".format(product))
                    bounds_file_path = os.path.join(
                        tree.odm_georeferencing,
                        'odm_georeferenced_model.bounds.gpkg')

                    if args.crop > 0:
                        # Crop DEM
                        Cropper.crop(bounds_file_path, dem_geotiff_path,
                                     utils.get_dem_vars(args))

                    if args.dem_euclidean_map:
                        unfilled_dem_path = io.related_file_path(
                            dem_geotiff_path, postfix=".unfilled")

                        if args.crop > 0:
                            # Crop unfilled DEM
                            Cropper.crop(bounds_file_path, unfilled_dem_path,
                                         utils.get_dem_vars(args))

                        commands.compute_euclidean_map(
                            unfilled_dem_path,
                            io.related_file_path(dem_geotiff_path,
                                                 postfix=".euclideand"),
                            overwrite=True)

                    if pseudo_georeference:
                        # 0.1 is arbitrary
                        pseudogeo.add_pseudo_georeferencing(
                            dem_geotiff_path, 0.1)

                    progress += 30
                    self.update_progress(progress)
            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')
Example #4
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        dem_input = tree.odm_georeferencing_model_laz
        pc_model_found = io.file_exists(dem_input)
        ignore_resolution = False
        pseudo_georeference = False

        if not reconstruction.is_georeferenced():
            log.ODM_WARNING(
                "Not georeferenced, using ungeoreferenced point cloud...")
            ignore_resolution = True
            pseudo_georeference = True

        # It is probably not reasonable to have accurate DEMs at the same resolution as the source photos, so reduce it
        # by a factor!
        gsd_scaling = 2.0

        resolution = gsd.cap_resolution(args.dem_resolution,
                                        tree.opensfm_reconstruction,
                                        gsd_scaling=gsd_scaling,
                                        ignore_gsd=args.ignore_gsd,
                                        ignore_resolution=ignore_resolution
                                        and args.ignore_gsd,
                                        has_gcp=reconstruction.has_gcp())

        log.ODM_INFO('Classify: ' + str(args.pc_classify))
        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(
            dem_input, str(pc_model_found)))

        # define paths and create working directories
        odm_dem_root = tree.path('odm_dem')
        if not io.dir_exists(odm_dem_root):
            system.mkdir_p(odm_dem_root)

        if args.pc_classify and pc_model_found:
            pc_classify_marker = os.path.join(odm_dem_root,
                                              'pc_classify_done.txt')

            if not io.file_exists(pc_classify_marker) or self.rerun():
                log.ODM_INFO(
                    "Classifying {} using Simple Morphological Filter".format(
                        dem_input))
                commands.classify(dem_input,
                                  args.smrf_scalar,
                                  args.smrf_slope,
                                  args.smrf_threshold,
                                  args.smrf_window,
                                  verbose=args.verbose)

                with open(pc_classify_marker, 'w') as f:
                    f.write('Classify: smrf\n')
                    f.write('Scalar: {}\n'.format(args.smrf_scalar))
                    f.write('Slope: {}\n'.format(args.smrf_slope))
                    f.write('Threshold: {}\n'.format(args.smrf_threshold))
                    f.write('Window: {}\n'.format(args.smrf_window))

        progress = 20
        self.update_progress(progress)

        if args.pc_rectify:
            commands.rectify(dem_input, args.debug)

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and pc_model_found:
            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                self.rerun():

                products = []

                if args.dsm or (args.dtm and args.dem_euclidean_map):
                    products.append('dsm')
                if args.dtm: products.append('dtm')

                radius_steps = [(resolution / 100.0) / 2.0]
                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(
                        radius_steps[-1] *
                        2)  # 2 is arbitrary, maybe there's a better value?

                for product in products:
                    commands.create_dem(
                        dem_input,
                        product,
                        output_type='idw' if product == 'dtm' else 'max',
                        radiuses=list(map(str, radius_steps)),
                        gapfill=args.dem_gapfill_steps > 0,
                        outdir=odm_dem_root,
                        resolution=resolution / 100.0,
                        decimation=args.dem_decimation,
                        verbose=args.verbose,
                        max_workers=args.max_concurrency,
                        keep_unfilled_copy=args.dem_euclidean_map)

                    dem_geotiff_path = os.path.join(odm_dem_root,
                                                    "{}.tif".format(product))
                    bounds_file_path = os.path.join(
                        tree.odm_georeferencing,
                        'odm_georeferenced_model.bounds.gpkg')

                    if args.crop > 0 or args.boundary:
                        # Crop DEM
                        Cropper.crop(
                            bounds_file_path,
                            dem_geotiff_path,
                            utils.get_dem_vars(args),
                            keep_original=not args.optimize_disk_space)

                    if args.dem_euclidean_map:
                        unfilled_dem_path = io.related_file_path(
                            dem_geotiff_path, postfix=".unfilled")

                        if args.crop > 0 or args.boundary:
                            # Crop unfilled DEM
                            Cropper.crop(
                                bounds_file_path,
                                unfilled_dem_path,
                                utils.get_dem_vars(args),
                                keep_original=not args.optimize_disk_space)

                        commands.compute_euclidean_map(
                            unfilled_dem_path,
                            io.related_file_path(dem_geotiff_path,
                                                 postfix=".euclideand"),
                            overwrite=True)

                    if pseudo_georeference:
                        pseudogeo.add_pseudo_georeferencing(dem_geotiff_path)

                    if args.tiles:
                        generate_dem_tiles(dem_geotiff_path,
                                           tree.path("%s_tiles" % product),
                                           args.max_concurrency)

                    if args.cog:
                        convert_to_cogeo(dem_geotiff_path,
                                         max_workers=args.max_concurrency)

                    progress += 30
                    self.update_progress(progress)
            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')
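
The pc_classify_done.txt marker that appears in every version above is a simple idempotency guard: the classification step is skipped when the marker exists and no rerun was requested. A minimal standalone sketch of the same pattern (run_once and its arguments are hypothetical names, not part of the ODM API):

    import os

    def run_once(marker_path, rerun, work):
        # Skip the step if a previous run left its marker and no rerun was requested.
        if os.path.exists(marker_path) and not rerun:
            return
        work()                                   # the expensive step (e.g. point cloud classification)
        with open(marker_path, 'w') as f:
            f.write('done\n')                    # record completion for future runs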