Example #1
def post_point_cloud_steps(args, tree, rerun=False):
    # XYZ point cloud output
    if args.pc_csv:
        log.ODM_INFO("Creating CSV file (XYZ format)")
        
        if not io.file_exists(tree.odm_georeferencing_xyz_file) or rerun:
            system.run("pdal translate -i \"{}\" "
                "-o \"{}\" "
                "--writers.text.format=csv "
                "--writers.text.order=\"X,Y,Z\" "
                "--writers.text.keep_unspecified=false ".format(
                    tree.odm_georeferencing_model_laz,
                    tree.odm_georeferencing_xyz_file))
        else:
            log.ODM_WARNING("Found existing CSV file %s" % tree.odm_georeferencing_xyz_file)

    # LAS point cloud output
    if args.pc_las:
        log.ODM_INFO("Creating LAS file")
        
        if not io.file_exists(tree.odm_georeferencing_model_las) or rerun:
            system.run("pdal translate -i \"{}\" "
                "-o \"{}\" ".format(
                    tree.odm_georeferencing_model_laz,
                    tree.odm_georeferencing_model_las))
        else:
            log.ODM_WARNING("Found existing LAS file %s" % tree.odm_georeferencing_xyz_file)

    # EPT point cloud output
    if args.pc_ept:
        log.ODM_INFO("Creating Entwine Point Tile output")
        entwine.build([tree.odm_georeferencing_model_laz], tree.entwine_pointcloud, max_concurrency=args.max_concurrency, rerun=rerun)
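Every example in this listing uses the same idempotency guard: produce an output only when it is missing or a rerun is requested, otherwise log a warning and keep the cached result. A minimal, generic sketch of that pattern follows; run_step, export_laz, and the logging calls are illustrative stand-ins for the io/log helpers used throughout these examples, not ODM's actual API:

import os
import logging

log = logging.getLogger(__name__)

def run_step(output_path, compute, rerun=False):
    # Run compute() only if the output is missing or a rerun was requested;
    # otherwise reuse the cached result, as the ODM stages above do.
    if not os.path.isfile(output_path) or rerun:
        compute()
    else:
        log.warning("Found existing output: %s", output_path)

# Hypothetical usage:
# run_step("odm_georeferencing/odm_georeferenced_model.laz",
#          lambda: export_laz(), rerun=False)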
Example #2
            def merge_dems(dem_filename, human_name):
                if not io.dir_exists(tree.path('odm_dem')):
                    system.mkdir_p(tree.path('odm_dem'))

                dem_file = tree.path("odm_dem", dem_filename)
                if not io.file_exists(dem_file) or self.rerun():
                    all_dems = get_submodel_paths(tree.submodels_path,
                                                  "odm_dem", dem_filename)
                    log.ODM_INFO("Merging %ss" % human_name)

                    # Merge
                    dem_vars = utils.get_dem_vars(args)
                    euclidean_merge_dems(all_dems, dem_file, dem_vars)

                    if io.file_exists(dem_file):
                        # Crop
                        if args.crop > 0:
                            Cropper.crop(merged_bounds_file, dem_file,
                                         dem_vars)
                        log.ODM_INFO("Created %s" % dem_file)
                    else:
                        log.ODM_WARNING("Cannot merge %s, %s was not created" %
                                        (human_name, dem_file))
                else:
                    log.ODM_WARNING("Found merged %s in %s" %
                                    (human_name, dem_filename))
Example #3
def no_resize(src_dir, target_dir, rerun_cell, photo):
    # define image paths
    path_file = photo.path_file
    new_path_file = io.join_paths(target_dir, photo.filename)
    # set raw image path in case we want to rerun cell
    if io.file_exists(new_path_file) and rerun_cell:
        path_file = io.join_paths(src_dir, photo.filename)

    if not io.file_exists(new_path_file) or rerun_cell:
        img = cv2.imread(path_file)
        io.copy(path_file, new_path_file)
        photo.path_file = new_path_file
        photo.width = img.shape[1]
        photo.height = img.shape[0]
        photo.update_focal()
        
        # log message
        log.ODM_DEBUG('Copied %s | dimensions: %s' %
                      (photo.filename, img.shape))
    else:
        # log message
        log.ODM_WARNING('Already copied %s | dimensions: %s x %s' %
                        (photo.filename, photo.width, photo.height))
                        
    return photo
Example #4
    def is_reconstruction_done(self):
        tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
        reconstruction_file = os.path.join(self.opensfm_project_path,
                                           'reconstruction.json')

        return io.file_exists(tracks_file) and io.file_exists(
            reconstruction_file)
Example #5
def no_resize(src_dir, target_dir, rerun_cell, photo):
    # define image paths
    path_file = photo.path_file
    new_path_file = io.join_paths(target_dir, photo.filename)
    # set raw image path in case we want to rerun cell
    if io.file_exists(new_path_file) and rerun_cell:
        path_file = io.join_paths(src_dir, photo.filename)

    if not io.file_exists(new_path_file) or rerun_cell:
        img = cv2.imread(path_file)
        io.copy(path_file, new_path_file)
        photo.path_file = new_path_file
        photo.width = img.shape[1]
        photo.height = img.shape[0]
        photo.update_focal()

        # log message
        log.ODM_DEBUG('Copied %s | dimensions: %s' %
                      (photo.filename, img.shape))
    else:
        # log message
        log.ODM_WARNING('Already copied %s | dimensions: %s x %s' %
                        (photo.filename, photo.width, photo.height))

    return photo
Example #6
    def reconstruct(self, rerun=False):
        tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
        reconstruction_file = os.path.join(self.opensfm_project_path,
                                           'reconstruction.json')

        if not io.file_exists(tracks_file) or rerun:
            self.run('create_tracks')
        else:
            log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' %
                            tracks_file)

        if not io.file_exists(reconstruction_file) or rerun:
            self.run('reconstruct')
        else:
            log.ODM_WARNING(
                'Found a valid OpenSfM reconstruction file in: %s' %
                reconstruction_file)

        # Check that a reconstruction file has been created
        if not self.reconstructed():
            log.ODM_ERROR(
                "The program could not process this dataset using the current settings. "
                "Check that the images have enough overlap, "
                "that there are enough recognizable features "
                "and that the images are in focus. "
                "You could also try to increase the --min-num-features parameter."
                "The program will now exit.")
            exit(1)
Example #7
    def georeference_with_gps(self,
                              images_path,
                              output_coords_file,
                              output_model_txt_geo,
                              rerun=False):
        try:
            if not io.file_exists(output_coords_file) or rerun:
                location.extract_utm_coords(self.photos, images_path,
                                            output_coords_file)
            else:
                log.ODM_INFO("Coordinates file already exist: %s" %
                             output_coords_file)

            # Deprecated: This is mostly for backward compatibility and should
            # be removed at some point
            if not io.file_exists(output_model_txt_geo) or rerun:
                with open(output_coords_file, 'r') as f:
                    with open(output_model_txt_geo, 'w+') as w:
                        w.write(f.readline())  # CRS
                        w.write(f.readline())  # Offset
            else:
                log.ODM_INFO("Model geo file already exist: %s" %
                             output_model_txt_geo)

            self.georef = ODM_GeoRef.FromCoordsFile(output_coords_file)
        except:
            log.ODM_WARNING(
                'Could not generate coordinates file. The orthophoto will not be georeferenced.'
            )

        self.gcp = GCPFile(None)
        return self.georef
Example #8
            def merge_dems(dem_filename, human_name):
                if not io.dir_exists(tree.path('odm_dem')):
                    system.mkdir_p(tree.path('odm_dem'))

                dem_file = tree.path("odm_dem", dem_filename)
                if not io.file_exists(dem_file) or self.rerun():
                    all_dems = get_submodel_paths(tree.submodels_path, "odm_dem", dem_filename)
                    log.ODM_INFO("Merging %ss" % human_name)

                    # Merge
                    dem_vars = utils.get_dem_vars(args)
                    eu_map_source = None  # Default

                    # Use DSM's euclidean map for DTMs
                    # (requires the DSM to be computed)
                    if human_name == "DTM":
                        eu_map_source = "dsm"

                    euclidean_merge_dems(all_dems, dem_file, dem_vars, euclidean_map_source=eu_map_source)

                    if io.file_exists(dem_file):
                        # Crop
                        if args.crop > 0:
                            Cropper.crop(merged_bounds_file, dem_file, dem_vars,
                                         keep_original=not args.optimize_disk_space)
                        log.ODM_INFO("Created %s" % dem_file)

                        if args.tiles:
                            generate_dem_tiles(dem_file, tree.path("%s_tiles" % human_name.lower()),
                                               args.max_concurrency)
                    else:
                        log.ODM_WARNING("Cannot merge %s, %s was not created" % (human_name, dem_file))

                else:
                    log.ODM_WARNING("Found merged %s in %s" % (human_name, dem_filename))
Example #9
def compute_cutline(orthophoto_file, crop_area_file, destination, max_concurrency=1, tmpdir=None, scale=1):
    if io.file_exists(orthophoto_file) and io.file_exists(crop_area_file):
        from opendm.grass_engine import grass
        log.ODM_DEBUG("Computing cutline")

        if tmpdir and not io.dir_exists(tmpdir):
            system.mkdir_p(tmpdir)

        scale = max(0.0001, min(1, scale))
        scaled_orthophoto = None

        if scale < 1:
            log.ODM_DEBUG("Scaling orthophoto to %s%% to compute cutline" % (scale * 100))

            scaled_orthophoto = os.path.join(tmpdir, os.path.basename(io.related_file_path(orthophoto_file, postfix=".scaled")))
            # Scale orthophoto before computing cutline
            system.run("gdal_translate -outsize {}% 0 "
                "-co NUM_THREADS={} "
                "--config GDAL_CACHEMAX {}% "
                "{} {}".format(
                scale * 100,
                max_concurrency,
                concurrency.get_max_memory(),
                orthophoto_file,
                scaled_orthophoto
            ))
            orthophoto_file = scaled_orthophoto

        try:
            ortho_width, ortho_height = get_image_size.get_image_size(orthophoto_file, fallback_on_error=False)
            log.ODM_DEBUG("Orthophoto dimensions are %sx%s" % (ortho_width, ortho_height))
            number_lines = int(max(8, math.ceil(min(ortho_width, ortho_height) / 256.0)))
        except:
            log.ODM_DEBUG("Cannot compute orthophoto dimensions, setting arbitrary number of lines.")
            number_lines = 32
        
        log.ODM_DEBUG("Number of lines: %s" % number_lines)

        gctx = grass.create_context({'auto_cleanup' : False, 'tmpdir': tmpdir})
        gctx.add_param('orthophoto_file', orthophoto_file)
        gctx.add_param('crop_area_file', crop_area_file)
        gctx.add_param('number_lines', number_lines)
        gctx.add_param('max_concurrency', max_concurrency)
        gctx.add_param('memory', int(concurrency.get_max_memory_mb(300)))
        gctx.set_location(orthophoto_file)

        cutline_file = gctx.execute(os.path.join("opendm", "grass", "compute_cutline.grass"))
        if cutline_file != 'error':
            if io.file_exists(cutline_file):
                shutil.move(cutline_file, destination)
                log.ODM_INFO("Generated cutline file: %s --> %s" % (cutline_file, destination))
                gctx.cleanup()
                return destination
            else:
                log.ODM_WARNING("Unexpected script result: %s. No cutline file has been generated." % cutline_file)
        else:
            log.ODM_WARNING("Could not generate orthophoto cutline. An error occured when running GRASS. No orthophoto will be generated.")
    else:
        log.ODM_WARNING("We've been asked to compute cutline, but either %s or %s is missing. Skipping..." % (orthophoto_file, crop_area_file))
Example #10
    def georeference_with_gcp(self,
                              gcp_file,
                              output_coords_file,
                              output_gcp_file,
                              rerun=False):
        if not io.file_exists(output_coords_file) or not io.file_exists(
                output_gcp_file) or rerun:
            gcp = GCPFile(gcp_file)
            if gcp.exists():
                # Create coords file, we'll be using this later
                # during georeferencing
                with open(output_coords_file, 'w') as f:
                    coords_header = gcp.wgs84_utm_zone()
                    f.write(coords_header + "\n")
                    log.ODM_INFO("Generated coords file from GCP: %s" %
                                 coords_header)

                # Convert GCP file to a UTM projection since the rest of the pipeline
                # does not handle other SRS well.
                rejected_entries = []
                utm_gcp = GCPFile(
                    gcp.create_utm_copy(
                        output_gcp_file,
                        filenames=[p.filename for p in self.photos],
                        rejected_entries=rejected_entries,
                        include_extras=False))

                if not utm_gcp.exists():
                    raise RuntimeError(
                        "Could not project GCP file to UTM. Please double check your GCP file for mistakes."
                    )

                for re in rejected_entries:
                    log.ODM_WARNING("GCP line ignored (image not found): %s" %
                                    str(re))

                if utm_gcp.entries_count() > 0:
                    log.ODM_INFO(
                        "%s GCP points will be used for georeferencing" %
                        utm_gcp.entries_count())
                else:
                    raise RuntimeError(
                        "A GCP file was provided, but no valid GCP entries could be used. Note that the GCP file is case sensitive (\".JPG\" is not the same as \".jpg\")."
                    )

                self.gcp = utm_gcp
            else:
                log.ODM_WARNING("GCP file does not exist: %s" % gcp_file)
                return
        else:
            log.ODM_INFO("Coordinates file already exist: %s" %
                         output_coords_file)
            log.ODM_INFO("GCP file already exist: %s" % output_gcp_file)
            self.gcp = GCPFile(output_gcp_file)

        self.georef = ODM_GeoRef.FromCoordsFile(output_coords_file)
        return self.georef
Example #11
def compute_cutline(orthophoto_file,
                    crop_area_file,
                    destination,
                    max_concurrency=1,
                    tmpdir=None):
    if io.file_exists(orthophoto_file) and io.file_exists(crop_area_file):
        from opendm.grass_engine import grass
        log.ODM_DEBUG("Computing cutline")

        if tmpdir and not io.dir_exists(tmpdir):
            system.mkdir_p(tmpdir)

        try:
            ortho_width, ortho_height = get_image_size.get_image_size(
                orthophoto_file)
            log.ODM_DEBUG("Orthophoto dimensions are %sx%s" %
                          (ortho_width, ortho_height))
            number_lines = int(
                max(8, math.ceil(min(ortho_width, ortho_height) / 256.0)))
        except get_image_size.UnknownImageFormat:
            log.ODM_DEBUG(
                "Cannot compute orthophoto dimensions, setting arbitrary number of lines."
            )
            number_lines = 32

        log.ODM_DEBUG("Number of lines: %s" % number_lines)

        gctx = grass.create_context({'auto_cleanup': False, 'tmpdir': tmpdir})
        gctx.add_param('orthophoto_file', orthophoto_file)
        gctx.add_param('crop_area_file', crop_area_file)
        gctx.add_param('number_lines', number_lines)
        gctx.add_param('max_concurrency', max_concurrency)
        gctx.add_param('memory', int(concurrency.get_max_memory_mb(300)))
        gctx.set_location(orthophoto_file)

        cutline_file = gctx.execute(
            os.path.join("opendm", "grass", "compute_cutline.grass"))
        if cutline_file != 'error':
            if io.file_exists(cutline_file):
                shutil.move(cutline_file, destination)
                log.ODM_INFO("Generated cutline file: %s --> %s" %
                             (cutline_file, destination))
                gctx.cleanup()
                return destination
            else:
                log.ODM_WARNING(
                    "Unexpected script result: %s. No cutline file has been generated."
                    % cutline_file)
        else:
            log.ODM_WARNING(
                "Could not generate orthophoto cutline. An error occured when running GRASS. No orthophoto will be generated."
            )
    else:
        log.ODM_WARNING(
            "We've been asked to compute cutline, but either %s or %s is missing. Skipping..."
            % (orthophoto_file, crop_area_file))
Example #12
    def georeference_with_gcp(self, gcp_file, output_coords_file, output_gcp_file, output_model_txt_geo, rerun=False):
        if not io.file_exists(output_coords_file) or not io.file_exists(output_gcp_file) or rerun:
            gcp = GCPFile(gcp_file)
            if gcp.exists():
                if gcp.entries_count() == 0:
                    raise RuntimeError("This GCP file does not have any entries. Are the entries entered in the proper format?")

                # Convert GCP file to a UTM projection since the rest of the pipeline
                # does not handle other SRS well.
                rejected_entries = []
                utm_gcp = GCPFile(gcp.create_utm_copy(output_gcp_file, filenames=[p.filename for p in self.photos], rejected_entries=rejected_entries, include_extras=False))
                
                if not utm_gcp.exists():
                    raise RuntimeError("Could not project GCP file to UTM. Please double check your GCP file for mistakes.")
                
                for re in rejected_entries:
                    log.ODM_WARNING("GCP line ignored (image not found): %s" % str(re))
                
                if utm_gcp.entries_count() > 0:
                    log.ODM_INFO("%s GCP points will be used for georeferencing" % utm_gcp.entries_count())
                else:
                    raise RuntimeError("A GCP file was provided, but no valid GCP entries could be used. Note that the GCP file is case sensitive (\".JPG\" is not the same as \".jpg\").")
                
                self.gcp = utm_gcp

                # Compute RTC offsets from GCP points
                x_pos = [p.x for p in utm_gcp.iter_entries()]
                y_pos = [p.y for p in utm_gcp.iter_entries()]
                x_off, y_off = int(np.round(np.mean(x_pos))), int(np.round(np.mean(y_pos)))

                # Create coords file, we'll be using this later
                # during georeferencing
                with open(output_coords_file, 'w') as f:
                    coords_header = gcp.wgs84_utm_zone()
                    f.write(coords_header + "\n")
                    f.write("{} {}\n".format(x_off, y_off))
                    log.ODM_INFO("Generated coords file from GCP: %s" % coords_header)
                
                # Deprecated: This is mostly for backward compatibility and should
                # be removed at some point
                shutil.copyfile(output_coords_file, output_model_txt_geo)
                log.ODM_INFO("Wrote %s" % output_model_txt_geo)
            else:
                log.ODM_WARNING("GCP file does not exist: %s" % gcp_file)
                return
        else:
            log.ODM_INFO("Coordinates file already exist: %s" % output_coords_file)
            log.ODM_INFO("GCP file already exist: %s" % output_gcp_file)
            self.gcp = GCPFile(output_gcp_file)
        
        self.georef = ODM_GeoRef.FromCoordsFile(output_coords_file)
        return self.georef
Example #13
    def export_bundler(self, destination_bundle_file, rerun=False):
        if not io.file_exists(destination_bundle_file) or rerun:
            # convert back to bundler's format
            system.run('%s/bin/export_bundler %s' %
                       (context.opensfm_path, self.opensfm_project_path))
        else:
            log.ODM_WARNING('Found a valid Bundler file in: %s' % destination_bundle_file)
Example #14
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        if not os.path.exists(tree.odm_filterpoints):
            system.mkdir_p(tree.odm_filterpoints)

        # check if reconstruction was done before
        if not io.file_exists(tree.filtered_point_cloud) or self.rerun():
            if args.fast_orthophoto:
                inputPointCloud = os.path.join(tree.opensfm,
                                               'reconstruction.ply')
            elif args.use_opensfm_dense:
                inputPointCloud = tree.opensfm_model
            else:
                inputPointCloud = tree.mve_model

            point_cloud.filter(inputPointCloud,
                               tree.filtered_point_cloud,
                               standard_deviation=args.pc_filter,
                               confidence=None,
                               sample_radius=args.pc_sample,
                               verbose=args.verbose)

            # inputPointCloud is only defined in this branch, so clean it up
            # here rather than after the if/else where it may not exist.
            if args.optimize_disk_space:
                os.remove(inputPointCloud)
        else:
            log.ODM_WARNING('Found a valid point cloud file in: %s' %
                            tree.filtered_point_cloud)
Example #15
File: osfm.py Project: utsingh/ODM
    def save_absolute_image_list_to(self, file):
        """
        Writes a copy of the image_list.txt file and makes sure that all paths
        written in it are absolute paths and not relative paths.
        """
        image_list_file = self.path("image_list.txt")

        if io.file_exists(image_list_file):
            with open(image_list_file, 'r') as f:
                content = f.read()

            lines = []
            for line in map(str.strip, content.split('\n')):
                if line and not line.startswith("/"):
                    line = os.path.abspath(
                        os.path.join(self.opensfm_project_path, line))
                lines.append(line)

            with open(file, 'w') as f:
                f.write("\n".join(lines))

            log.ODM_INFO("Wrote %s with absolute paths" % file)
        else:
            log.ODM_WARNING("No %s found, cannot create %s" %
                            (image_list_file, file))
Example #16
def add_pseudo_georeferencing(geotiff):
    if not io.file_exists(geotiff):
        log.ODM_WARNING("Cannot add pseudo georeferencing, %s does not exist" %
                        geotiff)
        return

    try:
        log.ODM_INFO(
            "Adding pseudo georeferencing (raster should show up at the equator) to %s"
            % geotiff)

        dst_ds = gdal.Open(geotiff, GA_Update)
        srs = osr.SpatialReference()
        srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
        srs.ImportFromProj4(get_pseudogeo_utm())
        dst_ds.SetProjection(srs.ExportToWkt())
        dst_ds.SetGeoTransform([
            0.0,
            get_pseudogeo_scale(), 0.0, 0.0, 0.0, -get_pseudogeo_scale()
        ])
        dst_ds = None

    except Exception as e:
        log.ODM_WARNING(
            "Cannot add psuedo georeferencing to %s (%s), skipping..." %
            (geotiff, str(e)))
Example #17
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        if not os.path.exists(tree.odm_report): system.mkdir_p(tree.odm_report)

        shots_geojson = os.path.join(tree.odm_report, "shots.geojson")
        if not io.file_exists(shots_geojson) or self.rerun():
            # Extract geographical camera shots
            if reconstruction.is_georeferenced():
                shots = get_geojson_shots_from_opensfm(
                    tree.opensfm_reconstruction, tree.opensfm_transformation,
                    reconstruction.get_proj_srs())
            else:
                # Pseudo geo
                shots = get_geojson_shots_from_opensfm(
                    tree.opensfm_reconstruction,
                    pseudo_geotiff=tree.odm_orthophoto_tif)

            if shots:
                with open(shots_geojson, "w") as fout:
                    fout.write(json.dumps(shots))

                log.ODM_INFO("Wrote %s" % shots_geojson)
            else:
                log.ODM_WARNING("Cannot extract shots")
        else:
            log.ODM_WARNING('Found a valid shots file in: %s' %
                            shots_geojson)
Example #18
    def ground_control_points(self, proj4):
        """
        Load ground control point information.
        """
        gcp_stats_file = self.path("stats", "ground_control_points.json")

        if not io.file_exists(gcp_stats_file):
            return []

        gcps_stats = {}
        try:
            with open(gcp_stats_file) as f:
                gcps_stats = json.loads(f.read())
        except:
            log.ODM_INFO("Cannot parse %s" % gcp_stats_file)

        if not gcps_stats:
            return []

        ds = DataSet(self.opensfm_project_path)
        reference = ds.load_reference()
        projection = pyproj.Proj(proj4)

        result = []
        for gcp in gcps_stats:
            geocoords = _transform(gcp['coordinates'], reference, projection)
            result.append({
                'id': gcp['id'],
                'observations': gcp['observations'],
                'coordinates': geocoords,
                'error': gcp['error']
            })

        return result
Example #19
def resize(src_dir, target_dir, resize_to, rerun_cell, photo):
    # define image paths
    path_file = photo.path_file
    new_path_file = io.join_paths(target_dir, photo.filename)
    # set raw image path in case we want to rerun cell
    if io.file_exists(new_path_file) and rerun_cell:
        path_file = io.join_paths(src_dir, photo.filename)

    if not io.file_exists(new_path_file) or rerun_cell:
        # open and resize image with opencv
        img = cv2.imread(path_file)
        # compute new size
        max_side = max(img.shape[0], img.shape[1])
        if max_side <= resize_to:
            log.ODM_WARNING(
                'Resize parameter is greater than or equal to the largest side of the image'
            )
        ratio = float(resize_to) / float(max_side)
        img_r = cv2.resize(img, None, fx=ratio, fy=ratio)
        # write image with opencv
        cv2.imwrite(new_path_file, img_r)
        # read metadata with pyexiv2
        old_meta = pyexiv2.ImageMetadata(path_file)
        new_meta = pyexiv2.ImageMetadata(new_path_file)
        old_meta.read()
        new_meta.read()
        # copy metadata
        old_meta.copy(new_meta)
        # update metadata size
        new_meta['Exif.Photo.PixelXDimension'] = img_r.shape[1]
        new_meta['Exif.Photo.PixelYDimension'] = img_r.shape[0]
        new_meta.write()
        # update photos array with new values
        photo.path_file = new_path_file
        photo.width = img_r.shape[1]
        photo.height = img_r.shape[0]
        photo.update_focal()

        # log message
        log.ODM_DEBUG('Resized %s | dimensions: %s' %
                      (photo.filename, img_r.shape))
    else:
        # log message
        log.ODM_WARNING('Already resized %s | dimensions: %s x %s' %
                        (photo.filename, photo.width, photo.height))

    return photo
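For clarity on the index ordering used above: cv2.imread returns a NumPy array shaped (height, width, channels), so width comes from shape[1] and height from shape[0], as in Example #21. A tiny self-contained check; a synthetic array stands in for a decoded image, so no files are needed:

import numpy as np

# Synthetic stand-in for cv2.imread output:
# arrays are indexed (rows, cols, channels) == (height, width, channels).
img = np.zeros((480, 640, 3), dtype=np.uint8)

width, height = img.shape[1], img.shape[0]
assert (width, height) == (640, 480)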
Example #20
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        log.ODM_INFO("Post Processing")

        if not outputs['large']:
            # TODO: support for split-merge?

            # Embed GCP info in 2D results via
            # XML metadata fields
            gcp_gml_export_file = tree.path("odm_georeferencing",
                                            "ground_control_points.gml")

            if reconstruction.has_gcp() and io.file_exists(
                    gcp_gml_export_file):
                skip_embed_gcp = False
                gcp_xml = ""

                with open(gcp_gml_export_file) as f:
                    gcp_xml = f.read()

                for product in [
                        tree.odm_orthophoto_tif,
                        tree.path("odm_dem", "dsm.tif"),
                        tree.path("odm_dem", "dtm.tif")
                ]:
                    if os.path.isfile(product):
                        ds = gdal.Open(product)
                        if ds is not None:
                            if ds.GetMetadata('xml:GROUND_CONTROL_POINTS'
                                              ) is None or self.rerun():
                                ds.SetMetadata(gcp_xml,
                                               'xml:GROUND_CONTROL_POINTS')
                                ds = None
                                log.ODM_INFO(
                                    "Wrote xml:GROUND_CONTROL_POINTS metadata to %s"
                                    % product)
                            else:
                                skip_embed_gcp = True
                                log.ODM_WARNING(
                                    "Already embedded ground control point information"
                                )
                                break
                        else:
                            log.ODM_WARNING(
                                "Cannot open %s for writing, skipping GCP embedding"
                                % product)

        if args.copy_to:
            try:
                copy_paths([
                    os.path.join(args.project_path, p)
                    for p in get_processing_results_paths()
                ], args.copy_to, self.rerun())
            except Exception as e:
                log.ODM_WARNING("Cannot copy to %s: %s" %
                                (args.copy_to, str(e)))
Example #21
def resize(src_dir, target_dir, resize_to, rerun_cell, photo):
    # define image paths
    path_file = photo.path_file
    new_path_file = io.join_paths(target_dir, photo.filename)
    # set raw image path in case we want to rerun cell
    if io.file_exists(new_path_file) and rerun_cell:
        path_file = io.join_paths(src_dir, photo.filename)

    if not io.file_exists(new_path_file) or rerun_cell:
        # open and resize image with opencv
        img = cv2.imread(path_file)
        # compute new size
        max_side = max(img.shape[0], img.shape[1])
        if max_side <= resize_to:
            log.ODM_WARNING('Resize parameter is greater than or equal to the largest side of the image')
        ratio = float(resize_to) / float(max_side)
        img_r = cv2.resize(img, None, fx=ratio, fy=ratio)
        # write image with opencv
        cv2.imwrite(new_path_file, img_r)
        # read metadata with pyexiv2
        old_meta = pyexiv2.ImageMetadata(path_file)
        new_meta = pyexiv2.ImageMetadata(new_path_file)
        old_meta.read()
        new_meta.read()
        # copy metadata
        old_meta.copy(new_meta)
        # update metadata size
        new_meta['Exif.Photo.PixelXDimension'] = img_r.shape[1]
        new_meta['Exif.Photo.PixelYDimension'] = img_r.shape[0]
        new_meta.write()
        # update photos array with new values
        photo.path_file = new_path_file
        photo.width = img_r.shape[1]
        photo.height = img_r.shape[0]
        photo.update_focal()

        # log message
        log.ODM_DEBUG('Resized %s | dimensions: %s' %
                      (photo.filename, img_r.shape))
    else:
        # log message
        log.ODM_WARNING('Already resized %s | dimensions: %s x %s' %
                        (photo.filename, photo.width, photo.height))

    return photo
Example #22
    def create_tracks(self, rerun=False):
        tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
        rs_file = self.path('rs_done.txt')

        if not io.file_exists(tracks_file) or rerun:
            self.run('create_tracks')
        else:
            log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' %
                            tracks_file)
Example #23
    def extract_offsets(self, geo_sys_file):
        if not io.file_exists(geo_sys_file):
            log.ODM_ERROR('Could not find file %s' % geo_sys_file)
            return

        with open(geo_sys_file) as f:
            offsets = f.readlines()[1].split(' ')
            self.utm_east_offset = float(offsets[0])
            self.utm_north_offset = float(offsets[1])
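extract_offsets above reads the UTM east/north offsets from the second line of a coords/geo file; Examples #7 and #12 show the same file being written (CRS header on line one, offsets on line two). A minimal round-trip sketch with made-up values; the path and numbers are illustrative only:

import os
import tempfile

# Write a coords-style file: CRS header on line 1, east/north offsets on line 2.
path = os.path.join(tempfile.mkdtemp(), "coords.txt")
with open(path, "w") as f:
    f.write("WGS84 UTM 17N\n")
    f.write("609600 4793000\n")

# Parse it the same way extract_offsets does.
with open(path) as f:
    offsets = f.readlines()[1].split(' ')
    utm_east_offset = float(offsets[0])
    utm_north_offset = float(offsets[1])

assert (utm_east_offset, utm_north_offset) == (609600.0, 4793000.0)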
Example #24
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Meshing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_meshing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_meshing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_meshing' in args.rerun_from)

        if not io.file_exists(tree.odm_mesh) or rerun_cell:
            log.ODM_DEBUG('Writing ODM Mesh file in: %s' % tree.odm_mesh)

            kwargs = {
                'bin': context.odm_modules_path,
                'outfile': tree.odm_mesh,
                'log': tree.odm_meshing_log,
                'max_vertex': self.params.max_vertex,
                'oct_tree': self.params.oct_tree,
                'samples': self.params.samples,
                'solver': self.params.solver,
                'verbose': verbose
            }
            if not args.use_pmvs:
                kwargs['infile'] = tree.opensfm_model
            else:
                kwargs['infile'] = tree.pmvs_model

            # run meshing binary
            system.run(
                '{bin}/odm_meshing -inputFile {infile} '
                '-outputFile {outfile} -logFile {log} '
                '-maxVertexCount {max_vertex} -octreeDepth {oct_tree} {verbose} '
                '-samplesPerNode {samples} -solverDivide {solver}'.format(
                    **kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Mesh file in: %s' %
                            tree.odm_mesh)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Meshing')

        log.ODM_INFO('Running ODM Meshing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_meshing' else ecto.QUIT
Example #25
    def configure(self, p, _i, _o):
        tree = types.ODM_Tree(p.args.project_path)
        self.tree = ecto.Constant(value=tree)

        # TODO(dakota) put this somewhere better maybe
        if p.args.time and io.file_exists(tree.benchmarking):
            # Delete the previously made file
            os.remove(tree.benchmarking)
            with open(tree.benchmarking, 'a') as b:
                b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))
Example #26
    def configure(self, p, _i, _o):
        tree = types.ODM_Tree(p.args.project_path, p.args.images)
        self.tree = ecto.Constant(value=tree)

        # TODO(dakota) put this somewhere better maybe
        if p.args.time and io.file_exists(tree.benchmarking):
            # Delete the previously made file
            os.remove(tree.benchmarking)
            with open(tree.benchmarking, 'a') as b:
                b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))
Example #27
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Texturing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_texturing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_texturing' in args.rerun_from)

        if not io.file_exists(tree.odm_textured_model_obj) or rerun_cell:
            log.ODM_DEBUG('Writing ODM Textured file in: %s' %
                          tree.odm_textured_model_obj)

            # odm_texturing definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'out_dir': tree.odm_texturing,
                'bundle': tree.opensfm_bundle,
                'imgs_path': tree.dataset_resize,
                'imgs_list': tree.opensfm_bundle_list,
                'model': tree.odm_mesh,
                'log': tree.odm_texuring_log,
                'resize': self.params.resize,
                'resolution': self.params.resolution,
                'size': self.params.size
            }

            # run texturing binary
            system.run(
                '{bin}/odm_texturing -bundleFile {bundle} '
                '-imagesPath {imgs_path} -imagesListPath {imgs_list} '
                '-inputModelPath {model} -outputFolder {out_dir}/ '
                '-textureResolution {resolution} -bundleResizedTo {resize} '
                '-textureWithSize {size} -logFile {log}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Texture file in: %s' %
                            tree.odm_textured_model_obj)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Texturing')

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_texturing' else ecto.QUIT
Example #28
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Meshing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_meshing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_meshing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_meshing' in args.rerun_from)

        if not io.file_exists(tree.odm_mesh) or rerun_cell:
            log.ODM_DEBUG('Writing ODM Mesh file in: %s' % tree.odm_mesh)

            kwargs = {
                'bin': context.odm_modules_path,
                'outfile': tree.odm_mesh,
                'log': tree.odm_meshing_log,
                'max_vertex': self.params.max_vertex,
                'oct_tree': self.params.oct_tree,
                'samples': self.params.samples,
                'solver': self.params.solver,
                'verbose': verbose
            }
            if not args.use_pmvs:
                kwargs['infile'] = tree.opensfm_model
            else:
                kwargs['infile'] = tree.pmvs_model

            # run meshing binary
            system.run('{bin}/odm_meshing -inputFile {infile} '
                       '-outputFile {outfile} -logFile {log} '
                       '-maxVertexCount {max_vertex} -octreeDepth {oct_tree} {verbose} '
                       '-samplesPerNode {samples} -solverDivide {solver}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Mesh file in: %s' %
                            tree.odm_mesh)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Meshing')

        log.ODM_INFO('Running ODM Meshing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_meshing' else ecto.QUIT
Example #29
def fast_merge_ply(input_point_cloud_files, output_file):
    # Assumes that all input files share the same header/content format
    # As the merge is a naive byte stream copy

    num_files = len(input_point_cloud_files)
    if num_files == 0:
        log.ODM_WARNING("No input point cloud files to process")
        return

    if io.file_exists(output_file):
        log.ODM_WARNING("Removing previous point cloud: %s" % output_file)
        os.remove(output_file)

    vertex_count = sum(
        [ply_info(pcf)['vertex_count'] for pcf in input_point_cloud_files])
    master_file = input_point_cloud_files[0]
    with open(output_file, "wb") as out:
        with open(master_file, "r", errors="ignore") as fhead:
            # Copy header
            line = fhead.readline()
            out.write(line.encode('utf8'))

            i = 0
            while line.strip().lower() != "end_header":
                line = fhead.readline()

                # Intercept element vertex field
                if line.lower().startswith("element vertex "):
                    out.write(
                        ("element vertex %s\n" % vertex_count).encode('utf8'))
                else:
                    out.write(line.encode('utf8'))

                i += 1
                if i > 100:
                    raise IOError("Cannot find end_header field. Invalid PLY?")

        for ipc in input_point_cloud_files:
            i = 0
            with open(ipc, "rb") as fin:
                # Skip header
                line = fin.readline()
                while line.strip().lower() != b"end_header":
                    line = fin.readline()

                    i += 1
                    if i > 100:
                        raise IOError(
                            "Cannot find end_header field. Invalid PLY?")

                # Write fields
                out.write(fin.read())

    return output_file
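A usage sketch for the merge helper above. The file names are hypothetical; the inputs must be binary PLY files that share an identical header layout, because the body bytes are concatenated verbatim after the vertex count is patched:

# Hypothetical per-submodel point clouds produced earlier in the pipeline.
inputs = [
    "submodels/submodel_0000/odm_filterpoints/point_cloud.ply",
    "submodels/submodel_0001/odm_filterpoints/point_cloud.ply",
]

merged = fast_merge_ply(inputs, "merged_point_cloud.ply")
print("Merged point cloud written to %s" % merged)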
Example #30
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Texturing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_texturing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_texturing' in args.rerun_from)

        if not io.file_exists(tree.odm_textured_model_obj) or rerun_cell:
            log.ODM_DEBUG('Writing ODM Textured file in: %s'
                          % tree.odm_textured_model_obj)

            # odm_texturing definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'out_dir': tree.odm_texturing,
                'bundle': tree.opensfm_bundle,
                'imgs_path': tree.dataset_resize,
                'imgs_list': tree.opensfm_bundle_list,
                'model': tree.odm_mesh,
                'log': tree.odm_texuring_log,
                'resize': self.params.resize,
                'resolution': self.params.resolution,
                'size': self.params.size
            }

            # run texturing binary
            system.run('{bin}/odm_texturing -bundleFile {bundle} '
                       '-imagesPath {imgs_path} -imagesListPath {imgs_list} '
                       '-inputModelPath {model} -outputFolder {out_dir}/ '
                       '-textureResolution {resolution} -bundleResizedTo {resize} '
                       '-textureWithSize {size} -logFile {log}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Texture file in: %s'
                            % tree.odm_textured_model_obj)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Texturing')

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_texturing' else ecto.QUIT
Example #31
File: types.py Project: sgrchen/ODM
    def georeference_with_gps(self, images_path, output_coords_file, rerun=False):
        try:
            if not io.file_exists(output_coords_file) or rerun:
                location.extract_utm_coords(self.photos, images_path, output_coords_file)
            else:
                log.ODM_INFO("Coordinates file already exist: %s" % output_coords_file)
            
            self.georef = ODM_GeoRef.FromCoordsFile(output_coords_file)
        except:
            log.ODM_WARNING('Could not generate coordinates file. The orthophoto will not be georeferenced.')

        self.gcp = GCPFile(None)
        return self.georef
Example #32
    def align_reconstructions(self, rerun):
        alignment_file = self.path('alignment_done.txt')
        if not io.file_exists(alignment_file) or rerun:
            log.ODM_INFO("Aligning submodels...")
            meta_data = metadataset.MetaDataSet(self.opensfm_project_path)
            reconstruction_shots = tools.load_reconstruction_shots(meta_data)
            transformations = tools.align_reconstructions(reconstruction_shots, use_points_constraints=False)
            tools.apply_transformations(transformations)

            with open(alignment_file, 'w') as fout:
                fout.write("Alignment done!\n")
        else:
            log.ODM_WARNING('Found an alignment done progress file in: %s' % alignment_file)
Example #33
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM PMVS Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'pmvs') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'pmvs' in args.rerun_from)

        if not io.file_exists(tree.pmvs_model) or rerun_cell:
            log.ODM_DEBUG('Creating dense pointcloud in: %s' % tree.pmvs_model)

            kwargs = {
                'bin': context.cmvs_opts_path,
                'prefix': tree.pmvs_rec_path,
                'level': self.params.level,
                'csize': self.params.csize,
                'thresh': self.params.thresh,
                'wsize': self.params.wsize,
                'min_imgs': self.params.min_imgs,
                'cores': self.params.cores
            }

            # generate pmvs2 options
            system.run('{bin} {prefix}/ {level} {csize} {thresh} {wsize} '
                       '{min_imgs} {cores}'.format(**kwargs))

            # run pmvs2
            system.run('%s %s/ option-0000' %
                       (context.pmvs2_path, tree.pmvs_rec_path))

        else:
            log.ODM_WARNING('Found a valid PMVS file in %s' % tree.pmvs_model)

        outputs.reconstruction = inputs.reconstruction
        
        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'PMVS')

        log.ODM_INFO('Running ODM PMVS Cell - Finished')
        return ecto.OK if args.end_with != 'pmvs' else ecto.QUIT
Example #34
    def align_reconstructions(self, rerun):
        alignment_file = self.path('alignment_done.txt')
        if not io.file_exists(alignment_file) or rerun:
            log.ODM_INFO("Aligning submodels...")
            meta_data = metadataset.MetaDataSet(self.opensfm_project_path)
            reconstruction_shots = tools.load_reconstruction_shots(meta_data)
            transformations = tools.align_reconstructions(
                reconstruction_shots, tools.partial_reconstruction_name, False)
            tools.apply_transformations(transformations)

            self.touch(alignment_file)
        else:
            log.ODM_WARNING('Found an alignment done progress file in: %s' %
                            alignment_file)
Example #35
    def process(self, inputs, outputs):
        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM FilterPoints Cell')

        # get inputs
        tree = inputs.tree
        args = inputs.args
        reconstruction = inputs.reconstruction

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_filterpoints') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_filterpoints' in args.rerun_from)
        if not os.path.exists(tree.odm_filterpoints):
            system.mkdir_p(tree.odm_filterpoints)

        # check if reconstruction was done before
        if not io.file_exists(tree.filtered_point_cloud) or rerun_cell:
            if args.fast_orthophoto:
                inputPointCloud = os.path.join(tree.opensfm,
                                               'reconstruction.ply')
            elif args.use_opensfm_dense:
                inputPointCloud = tree.opensfm_model
            else:
                inputPointCloud = tree.mve_model

            confidence = None
            if not args.use_opensfm_dense and not args.fast_orthophoto:
                confidence = args.mve_confidence

            point_cloud.filter(inputPointCloud,
                               tree.filtered_point_cloud,
                               standard_deviation=args.pc_filter,
                               confidence=confidence,
                               verbose=args.verbose)
        else:
            log.ODM_WARNING('Found a valid point cloud file in: %s' %
                            tree.filtered_point_cloud)

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'MVE')

        log.ODM_INFO('Running ODM FilterPoints Cell - Finished')
        return ecto.OK if args.end_with != 'odm_filterpoints' else ecto.QUIT
Example #36
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM PMVS Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'pmvs') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'pmvs' in args.rerun_from)

        if not io.file_exists(tree.pmvs_model) or rerun_cell:
            log.ODM_DEBUG('Creating dense pointcloud in: %s' % tree.pmvs_model)

            kwargs = {
                'bin': context.cmvs_opts_path,
                'prefix': tree.pmvs_rec_path,
                'level': self.params.level,
                'csize': self.params.csize,
                'thresh': self.params.thresh,
                'wsize': self.params.wsize,
                'min_imgs': self.params.min_imgs,
                'cores': self.params.cores
            }

            # generate pmvs2 options
            system.run('{bin} {prefix}/ {level} {csize} {thresh} {wsize} '
                       '{min_imgs} {cores}'.format(**kwargs))

            # run pmvs2
            system.run('%s %s/ option-0000' %
                       (context.pmvs2_path, tree.pmvs_rec_path))

        else:
            log.ODM_WARNING('Found a valid PMVS file in %s' % tree.pmvs_model)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'PMVS')

        log.ODM_INFO('Running ODM PMVS Cell - Finished')
        return ecto.OK if args.end_with != 'pmvs' else ecto.QUIT
Example #37
    def process(self, inputs, outputs):
        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM FilterPoints Cell')

        # get inputs
        tree = inputs.tree
        args = inputs.args
        reconstruction = inputs.reconstruction

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_filterpoints') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_filterpoints' in args.rerun_from)
        if not os.path.exists(tree.odm_filterpoints): system.mkdir_p(tree.odm_filterpoints)

        # check if reconstruction was done before
        if not io.file_exists(tree.filtered_point_cloud) or rerun_cell:
            if args.fast_orthophoto:
                inputPointCloud = os.path.join(tree.opensfm, 'reconstruction.ply')
            elif args.use_opensfm_dense:
                inputPointCloud = tree.opensfm_model
            else:
                inputPointCloud = tree.mve_model

            confidence = None
            if not args.use_opensfm_dense and not args.fast_orthophoto:
                confidence = args.mve_confidence

            point_cloud.filter(inputPointCloud, tree.filtered_point_cloud, standard_deviation=args.pc_filter, confidence=confidence, verbose=args.verbose)
        else:
            log.ODM_WARNING('Found a valid point cloud file in: %s' %
                            tree.filtered_point_cloud)

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'MVE')

        log.ODM_INFO('Running ODM FilterPoints Cell - Finished')
        return ecto.OK if args.end_with != 'odm_filterpoints' else ecto.QUIT
Example #38
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()
        
        log.ODM_INFO('Running ODM CMVS Cell')

        # get inputs 
        args = self.inputs.args
        tree = self.inputs.tree

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'cmvs') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'cmvs' in args.rerun_from)

        if not io.file_exists(tree.pmvs_bundle) or rerun_cell:
            log.ODM_DEBUG('Writing CMVS vis in: %s' % tree.pmvs_bundle)

            # copy bundle file to pmvs dir
            from shutil import copyfile
            copyfile(tree.opensfm_bundle, 
                     tree.pmvs_bundle)

            kwargs = {
                'bin': context.cmvs_path,
                'prefix': self.inputs.tree.pmvs_rec_path,
                'max_images': self.params.max_images,
                'cores': self.params.cores
            }

            # run cmvs
            system.run('{bin} {prefix}/ {max_images} {cores}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid CMVS file in: %s' % 
                            tree.pmvs_bundle)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'CMVS')

        log.ODM_INFO('Running ODM CMVS Cell - Finished')
        return ecto.OK if args.end_with != 'cmvs' else ecto.QUIT
Example #39
    def process(self, inputs, outputs):
        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Georeferencing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        gcpfile = io.join_paths(tree.root_path, self.params.gcp_file)

        # define paths and create working directories
        system.mkdir_p(tree.odm_georeferencing)

        # In case a GCP file is not provided, try to generate it using the
        # images' metadata. Internally calls jhead.
        if not self.params.use_gcp and \
           not io.file_exists(tree.odm_georeferencing_coords):
            
            log.ODM_WARNING('Warning: No coordinates file. '
                            'Generating coordinates file in: %s'
                            % tree.odm_georeferencing_coords)
            try:
                # odm_georeference definitions
                kwargs = {
                    'bin': context.odm_modules_path,
                    'imgs': tree.dataset_raw,
                    'imgs_list': tree.opensfm_bundle_list,
                    'coords': tree.odm_georeferencing_coords,
                    'log': tree.odm_georeferencing_utm_log
                }

                # run UTM extraction binary
                system.run('{bin}/odm_extract_utm -imagesPath {imgs}/ '
                           '-imageListFile {imgs_list} -outputCoordFile {coords} '
                           '-logFile {log}'.format(**kwargs))

            except Exception as e:
                log.ODM_ERROR('Could not generate GCP file from images metadata. '
                              'Consider rerunning with argument --odm_georeferencing-useGcp'
                              ' and provide a proper GCP file')
                log.ODM_ERROR(e)
                return ecto.QUIT
Example #40
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Orthophoto Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_orthophoto)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_orthophoto') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_orthophoto' in args.rerun_from)

        if not io.file_exists(tree.odm_orthophoto_file) or rerun_cell:

            # odm_orthophoto definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'log': tree.odm_orthophoto_log,
                'ortho': tree.odm_orthophoto_file,
                'corners': tree.odm_orthophoto_corners,
                'res': self.params.resolution,
                'verbose': verbose
            }

            kwargs['model_geo'] = tree.odm_georeferencing_model_obj_geo \
                if io.file_exists(tree.odm_georeferencing_coords) \
                else tree.odm_textured_model_obj


            # run odm_orthophoto
            system.run('{bin}/odm_orthophoto -inputFile {model_geo} '
                       '-logFile {log} -outputFile {ortho} -resolution {res} {verbose} '
                       '-outputCornerFile {corners}'.format(**kwargs))

            if not io.file_exists(tree.odm_georeferencing_coords):
                log.ODM_WARNING('No coordinates file. A georeferenced raster '
                                'will not be created')
            else:
                # Create georeferenced GeoTiff
                geotiffcreated = False
                georef = types.ODM_GeoRef()
                # creates the coord refs  # TODO: avoid doing this twice (also done after odm_georef)
                georef.parse_coordinate_system(tree.odm_georeferencing_coords)

                if georef.epsg and georef.utm_east_offset and georef.utm_north_offset:
                    ulx = uly = lrx = lry = 0.0
                    with open(tree.odm_orthophoto_corners) as f:
                        for lineNumber, line in enumerate(f):
                            if lineNumber == 0:
                                tokens = line.split(' ')
                                if len(tokens) == 4:
                                    ulx = float(tokens[0]) + \
                                        float(georef.utm_east_offset)
                                    lry = float(tokens[1]) + \
                                        float(georef.utm_north_offset)
                                    lrx = float(tokens[2]) + \
                                        float(georef.utm_east_offset)
                                    uly = float(tokens[3]) + \
                                        float(georef.utm_north_offset)
                    log.ODM_INFO('Creating GeoTIFF')

                    kwargs = {
                        'ulx': ulx,
                        'uly': uly,
                        'lrx': lrx,
                        'lry': lry,
                        'tiled': '' if self.params.no_tiled else '-co TILED=yes ',
                        'compress': self.params.compress,
                        'predictor': '-co PREDICTOR=2 ' if self.params.compress in
                                                           ['LZW', 'DEFLATE'] else '',
                        'epsg': georef.epsg,
                        't_srs': self.params.t_srs or "EPSG:{0}".format(georef.epsg),
                        'bigtiff': self.params.bigtiff,
                        'png': tree.odm_orthophoto_file,
                        'tiff': tree.odm_orthophoto_tif,
                        'log': tree.odm_orthophoto_tif_log
                    }

                    system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} '
                               '{tiled} '
                               '-co BIGTIFF={bigtiff} '
                               '-co COMPRESS={compress} '
                               '{predictor} '
                               '-co BLOCKXSIZE=512 '
                               '-co BLOCKYSIZE=512 '
                               '-co NUM_THREADS=ALL_CPUS '
                               '-a_srs \"EPSG:{epsg}\" '
                               '{png} {tiff} > {log}'.format(**kwargs))

                    if self.params.build_overviews:
                        log.ODM_DEBUG("Building Overviews")
                        kwargs = {
                            'orthophoto': tree.odm_orthophoto_tif,
                            'log': tree.odm_orthophoto_gdaladdo_log
                        }
                        # Run gdaladdo
                        system.run('gdaladdo -ro -r average '
                                   '--config BIGTIFF_OVERVIEW IF_SAFER '
                                   '--config COMPRESS_OVERVIEW JPEG '
                                   '{orthophoto} 2 4 8 16 > {log}'.format(**kwargs))

                    geotiffcreated = True
                if not geotiffcreated:
                    log.ODM_WARNING('No geo-referenced orthophoto created due '
                                    'to missing geo-referencing or corner coordinates.')

        else:
            log.ODM_WARNING('Found a valid orthophoto in: %s' % tree.odm_orthophoto_file)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Orthophoto')

        log.ODM_INFO('Running ODM OrthoPhoto Cell - Finished')
        return ecto.OK if args.end_with != 'odm_orthophoto' else ecto.QUIT
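The corner-file parsing above (and in the later orthophoto cells) reads four floats from the first line of the corners file and shifts them by the UTM offsets; a hedged sketch of that step as a standalone helper (read_orthophoto_corners is an illustrative name, not an ODM function):

def read_orthophoto_corners(corners_path, utm_east_offset, utm_north_offset):
    # First line of the corners file: "ulx lry lrx uly" in local model coordinates.
    with open(corners_path) as f:
        tokens = f.readline().split(' ')
    if len(tokens) != 4:
        return None
    ulx = float(tokens[0]) + float(utm_east_offset)
    lry = float(tokens[1]) + float(utm_north_offset)
    lrx = float(tokens[2]) + float(utm_east_offset)
    uly = float(tokens[3]) + float(utm_north_offset)
    return ulx, uly, lrx, lry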
Example #41
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Georeferencing Cell')

        # get inputs
        args = inputs.args
        tree = inputs.tree
        reconstruction = inputs.reconstruction
        gcpfile = tree.odm_georeferencing_gcp
        doPointCloudGeo = True
        transformPointCloud = True
        verbose = '-verbose' if self.params.verbose else ''
        geo_ref = reconstruction.georef

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_georeferencing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_georeferencing' in args.rerun_from)

        runs = [{
            'georeferencing_dir': tree.odm_georeferencing,
            'texturing_dir': tree.odm_texturing,
            'model': os.path.join(tree.odm_texturing, tree.odm_textured_model_obj)
        }]

        if args.skip_3dmodel:
            runs = []

        if not args.use_3dmesh:
            # Make sure 2.5D mesh is georeferenced before the 3D mesh
            # Because it will be used to calculate a transform
            # for the point cloud. If we use the 3D model transform,
            # DEMs and orthophoto might not align!
            runs.insert(0, {
                    'georeferencing_dir': tree.odm_25dgeoreferencing,
                    'texturing_dir': tree.odm_25dtexturing,
                    'model': os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj)
                })

        for r in runs:
            odm_georeferencing_model_obj_geo = os.path.join(r['texturing_dir'], tree.odm_georeferencing_model_obj_geo)
            odm_georeferencing_log = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_log)
            odm_georeferencing_transform_file = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_transform_file)
            odm_georeferencing_model_txt_geo_file = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_model_txt_geo)

            if not io.file_exists(odm_georeferencing_model_obj_geo) or \
               not io.file_exists(tree.odm_georeferencing_model_laz) or rerun_cell:

                # odm_georeference definitions
                kwargs = {
                    'bin': context.odm_modules_path,
                    'input_pc_file': tree.filtered_point_cloud,
                    'bundle': tree.opensfm_bundle,
                    'imgs': tree.dataset_raw,
                    'imgs_list': tree.opensfm_bundle_list,
                    'model': r['model'],
                    'log': odm_georeferencing_log,
                    'input_trans_file': tree.opensfm_transformation,
                    'transform_file': odm_georeferencing_transform_file,
                    'coords': tree.odm_georeferencing_coords,
                    'output_pc_file': tree.odm_georeferencing_model_laz,
                    'geo_sys': odm_georeferencing_model_txt_geo_file,
                    'model_geo': odm_georeferencing_model_obj_geo,
                    'gcp': gcpfile,
                    'verbose': verbose
                }

                if transformPointCloud:
                    kwargs['pc_params'] = '-inputPointCloudFile {input_pc_file} -outputPointCloudFile {output_pc_file}'.format(**kwargs)

                    if geo_ref and geo_ref.projection and geo_ref.projection.srs:
                        kwargs['pc_params'] += ' -outputPointCloudSrs %s' % pipes.quote(geo_ref.projection.srs)
                    else:
                        log.ODM_WARNING('NO SRS: The output point cloud will not have a SRS.')
                else:
                    kwargs['pc_params'] = ''
 
                # Check to see if the GCP file exists

                if not self.params.use_exif and (self.params.gcp_file or tree.odm_georeferencing_gcp):
                    log.ODM_INFO('Found %s' % gcpfile)
                    try:
                        system.run('{bin}/odm_georef -bundleFile {bundle} -imagesPath {imgs} -imagesListPath {imgs_list} '
                                   '-inputFile {model} -outputFile {model_geo} '
                                   '{pc_params} {verbose} '
                                   '-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys} -gcpFile {gcp} '
                                   '-outputCoordFile {coords}'.format(**kwargs))
                    except Exception:
                        log.ODM_EXCEPTION('Georeferencing failed.')
                        return ecto.QUIT
                elif io.file_exists(tree.opensfm_transformation) and io.file_exists(tree.odm_georeferencing_coords):
                    log.ODM_INFO('Running georeferencing with OpenSfM transformation matrix')
                    system.run('{bin}/odm_georef -bundleFile {bundle} -inputTransformFile {input_trans_file} -inputCoordFile {coords} '
                               '-inputFile {model} -outputFile {model_geo} '
                               '{pc_params} {verbose} '
                               '-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys}'.format(**kwargs))
                elif io.file_exists(tree.odm_georeferencing_coords):
                    log.ODM_INFO('Running georeferencing with generated coords file.')
                    system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} '
                               '-inputFile {model} -outputFile {model_geo} '
                               '{pc_params} {verbose} '
                               '-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys}'.format(**kwargs))
                else:
                    log.ODM_WARNING('Georeferencing failed. Make sure your '
                                    'photos have geotags in the EXIF or you have '
                                    'provided a GCP file. ')
                    doPointCloudGeo = False # skip the rest of the georeferencing

                if doPointCloudGeo:
                    # update images metadata
                    geo_ref.extract_offsets(odm_georeferencing_model_txt_geo_file)
                    reconstruction.georef = geo_ref

                    # XYZ point cloud output
                    if args.pc_csv:
                        log.ODM_INFO("Creating geo-referenced CSV file (XYZ format)")
                        
                        system.run("pdal translate -i \"{}\" "
                            "-o \"{}\" "
                            "--writers.text.format=csv "
                            "--writers.text.order=\"X,Y,Z\" "
                            "--writers.text.keep_unspecified=false ".format(
                                tree.odm_georeferencing_model_laz,
                                tree.odm_georeferencing_xyz_file))
                    
                    # LAS point cloud output
                    if args.pc_las:
                        log.ODM_INFO("Creating geo-referenced LAS file")
                        
                        system.run("pdal translate -i \"{}\" "
                            "-o \"{}\" ".format(
                                tree.odm_georeferencing_model_laz,
                                tree.odm_georeferencing_model_las))
                    
                    if args.crop > 0:
                        log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")
                        cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model')
                        
                        decimation_step = 40 if args.fast_orthophoto or args.use_opensfm_dense else 90
                        
                        # More aggressive decimation for large datasets
                        if not args.fast_orthophoto:
                            decimation_step *= int(len(reconstruction.photos) / 1000) + 1

                        cropper.create_bounds_shapefile(tree.odm_georeferencing_model_laz, args.crop, 
                                                    decimation_step=decimation_step,
                                                    outlier_radius=20 if args.fast_orthophoto else 2)

                    # Do not execute a second time, since
                    # We might be doing georeferencing for
                    # multiple models (3D, 2.5D, ...)
                    doPointCloudGeo = False
                    transformPointCloud = False
            else:
                log.ODM_WARNING('Found a valid georeferenced model in: %s'
                                % tree.odm_georeferencing_model_laz)

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Georeferencing')

        log.ODM_INFO('Running ODM Georeferencing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_georeferencing' else ecto.QUIT
Example #42
0
    def process(self, inputs, outputs):
        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM DEM Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        las_model_found = io.file_exists(tree.odm_georeferencing_model_las)
        env_paths = [context.superbuild_bin_path]

        # Verify that the lidar2dems tools are available
        l2d_module_installed = True
        try:
            system.run('l2d_classify --help > /dev/null', env_paths)
        except Exception:
            log.ODM_WARNING('lidar2dems is not installed properly')
            l2d_module_installed = False

        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(tree.odm_georeferencing_model_las, str(las_model_found)))

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and las_model_found and l2d_module_installed:

            # define paths and create working directories
            odm_dem_root = tree.path('odm_dem')
            system.mkdir_p(odm_dem_root)

            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            # check if we rerun cell or not
            rerun_cell = (args.rerun is not None and
                          args.rerun == 'odm_dem') or \
                         (args.rerun_all) or \
                         (args.rerun_from is not None and
                          'odm_dem' in args.rerun_from)

            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                rerun_cell:

                # Extract boundaries and srs of point cloud
                summary_file_path = os.path.join(odm_dem_root, 'odm_georeferenced_model.summary.json')
                boundary_file_path = os.path.join(odm_dem_root, 'odm_georeferenced_model.boundary.json')

                system.run('pdal info --summary {0} > {1}'.format(tree.odm_georeferencing_model_las, summary_file_path), env_paths)
                system.run('pdal info --boundary {0} > {1}'.format(tree.odm_georeferencing_model_las,  boundary_file_path), env_paths)

                pc_proj4 = ""
                pc_geojson_boundary_feature = None

                with open(summary_file_path, 'r') as f:
                    json_f = json.loads(f.read())
                    pc_proj4 = json_f['summary']['srs']['proj4']

                with open(boundary_file_path, 'r') as f:
                    json_f = json.loads(f.read())
                    pc_geojson_boundary_feature = json_f['boundary']['boundary_json']

                # Write bounds to GeoJSON
                bounds_geojson_path = os.path.join(odm_dem_root, 'odm_georeferenced_model.bounds.geojson')
                with open(bounds_geojson_path, "w") as f:
                    f.write(json.dumps({
                        "type": "FeatureCollection",
                        "features": [{
                            "type": "Feature",
                            "geometry": pc_geojson_boundary_feature
                        }]
                    }))

                bounds_shapefile_path = os.path.join(odm_dem_root, 'bounds.shp')

                # Convert bounds to Shapefile
                kwargs = {
                    'input': bounds_geojson_path,
                    'output': bounds_shapefile_path,
                    'proj4': pc_proj4
                }
                system.run('ogr2ogr -overwrite -a_srs "{proj4}" {output} {input}'.format(**kwargs))

                # Process with lidar2dems
                terrain_params_map = {
                    'flatnonforest': (1, 3), 
                    'flatforest': (1, 2), 
                    'complexnonforest': (5, 2), 
                    'complexforest': (10, 2)
                }
                terrain_params = terrain_params_map[args.dem_terrain_type.lower()]             

                kwargs = {
                    'verbose': '-v' if self.params.verbose else '',
                    'slope': terrain_params[0],
                    'cellsize': terrain_params[1],
                    'outdir': odm_dem_root,
                    'site': bounds_shapefile_path
                }

                l2d_params = '--slope {slope} --cellsize {cellsize} ' \
                             '{verbose} ' \
                             '-o -s {site} ' \
                             '--outdir {outdir}'.format(**kwargs)

                approximate = '--approximate' if args.dem_approximate else ''

                # Classify only if we need a DTM
                run_classification = args.dtm

                if run_classification:
                    system.run('l2d_classify {0} --decimation {1} '
                               '{2} --initialDistance {3} {4}'.format(
                        l2d_params, args.dem_decimation, approximate, 
                        args.dem_initial_distance, tree.odm_georeferencing), env_paths)
                else:
                    log.ODM_INFO("Will skip classification, only DSM is needed")
                    copyfile(tree.odm_georeferencing_model_las, os.path.join(odm_dem_root, 'bounds-0_l2d_s{slope}c{cellsize}.las'.format(**kwargs)))

                products = []
                if args.dsm: products.append('dsm') 
                if args.dtm: products.append('dtm')

                radius_steps = [args.dem_resolution]
                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(radius_steps[-1] * 3) # 3 is arbitrary, maybe there's a better value?

                for product in products:
                    demargs = {
                        'product': product,
                        'indir': odm_dem_root,
                        'l2d_params': l2d_params,
                        'maxsd': args.dem_maxsd,
                        'maxangle': args.dem_maxangle,
                        'resolution': args.dem_resolution,
                        'radius_steps': ' '.join(map(str, radius_steps)),
                        'gapfill': '--gapfill' if args.dem_gapfill_steps > 0 else '',
                        
                        # If we didn't run a classification, we should pass the decimate parameter here
                        'decimation': '--decimation {0}'.format(args.dem_decimation) if not run_classification else ''
                    }

                    system.run('l2d_dems {product} {indir} {l2d_params} '
                               '--maxsd {maxsd} --maxangle {maxangle} '
                               '--resolution {resolution} --radius {radius_steps} '
                               '{decimation} '
                               '{gapfill} '.format(**demargs), env_paths)

                    # Rename final output
                    if product == 'dsm':
                        os.rename(os.path.join(odm_dem_root, 'bounds-0_dsm.idw.tif'), dsm_output_filename)
                    elif product == 'dtm':
                        os.rename(os.path.join(odm_dem_root, 'bounds-0_dtm.idw.tif'), dtm_output_filename)

            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Dem')

        log.ODM_INFO('Running ODM DEM Cell - Finished')
        return ecto.OK if args.end_with != 'odm_dem' else ecto.QUIT
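The gap-fill radii computed above start at the DEM resolution and triple once per extra gap-fill step; a small sketch of that progression with illustrative values (not taken from a real run):

def gapfill_radii(dem_resolution, gapfill_steps):
    # One search radius per gap-fill pass, tripling each time.
    radii = [dem_resolution]
    for _ in range(gapfill_steps - 1):
        radii.append(radii[-1] * 3)
    return radii

# gapfill_radii(5, 3) --> [5, 15, 45]  (example values only)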
Example #43
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM OpenSfM Cell')

        # get inputs
        tree = self.inputs.tree
        args = self.inputs.args
        photos = self.inputs.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos array to start OpenSfM')
            return ecto.QUIT

        # create working directories     
        system.mkdir_p(tree.opensfm)
        system.mkdir_p(tree.pmvs)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'opensfm') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'opensfm' in args.rerun_from)

        # check if reconstruction was done before

        if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell:
            # create file list
            list_path = io.join_paths(tree.opensfm, 'image_list.txt')
            with open(list_path, 'w') as fout:
                for photo in photos:
                    fout.write('%s\n' % photo.path_file)

            # create config file for OpenSfM
            config = [
                "use_exif_size: %s" % ('no' if not self.params.use_exif_size else 'yes'),
                "feature_process_size: %s" % self.params.feature_process_size,
                "feature_min_frames: %s" % self.params.feature_min_frames,
                "processes: %s" % self.params.processes,
                "matching_gps_neighbors: %s" % self.params.matching_gps_neighbors
            ]

            if args.matcher_distance > 0:
                config.append("matching_gps_distance: %s" % self.params.matching_gps_distance)

            # write config file
            config_filename = io.join_paths(tree.opensfm, 'config.yaml')
            with open(config_filename, 'w') as fout:
                fout.write("\n".join(config))

            # run OpenSfM reconstruction
            system.run('PYTHONPATH=%s %s/bin/run_all %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid OpenSfM file in: %s' %
                            tree.opensfm_reconstruction)

        # check if reconstruction was exported to bundler before

        if not io.file_exists(tree.opensfm_bundle_list) or rerun_cell:
            # convert back to bundler's format
            system.run('PYTHONPATH=%s %s/bin/export_bundler %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid Bundler file in: %s' %
                            tree.opensfm_reconstruction)

        # check if reconstruction was exported to pmvs before

        if not io.file_exists(tree.pmvs_visdat) or rerun_cell:
            # run PMVS converter
            system.run('PYTHONPATH=%s %s/bin/export_pmvs %s --output %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm, tree.pmvs))
        else:
            log.ODM_WARNING('Found a valid CMVS file in: %s' % tree.pmvs_visdat)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'OpenSfM')

        log.ODM_INFO('Running ODM OpenSfM Cell - Finished')
        return ecto.OK if args.end_with != 'opensfm' else ecto.QUIT
Example #44
0
    def process(self, inputs, outputs):
        """Run the cell."""
        log.ODM_INFO('Running ODM Slam Cell')

        # get inputs
        tree = self.inputs.tree
        args = self.inputs.args
        if not args.video:
            log.ODM_ERROR('No video provided')
            return ecto.QUIT

        video = os.path.join(tree.root_path, args.video)
        slam_config = os.path.join(tree.root_path, args.slam_config)

        # create working directories
        system.mkdir_p(tree.opensfm)
        system.mkdir_p(tree.pmvs)

        vocabulary = os.path.join(context.orb_slam2_path,
                                  'Vocabulary/ORBvoc.txt')
        orb_slam_cmd = os.path.join(context.odm_modules_path, 'odm_slam')
        trajectory = os.path.join(tree.opensfm, 'KeyFrameTrajectory.txt')
        map_points = os.path.join(tree.opensfm, 'MapPoints.txt')

        # check if we rerun cell or not
        rerun_cell = args.rerun == 'slam'

        # check if slam was run before
        if not io.file_exists(trajectory) or rerun_cell:
            # run slam binary
            system.run(' '.join([
                'cd {} &&'.format(tree.opensfm),
                orb_slam_cmd,
                vocabulary,
                slam_config,
                video,
            ]))
        else:
            log.ODM_WARNING('Found a valid slam trajectory in: {}'.format(
                trajectory))

        # check if trajectory was exported to opensfm before
        if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell:
            # convert slam to opensfm
            system.run(' '.join([
                'cd {} &&'.format(tree.opensfm),
                'PYTHONPATH={}:{}'.format(context.pyopencv_path,
                                          context.opensfm_path),
                'python',
                os.path.join(context.odm_modules_src_path,
                             'odm_slam/src/orb_slam_to_opensfm.py'),
                video,
                trajectory,
                map_points,
                slam_config,
            ]))
            # link opensfm images to resized images
            os.symlink(tree.opensfm + '/images', tree.dataset_resize)
        else:
            log.ODM_WARNING('Found a valid OpenSfM file in: {}'.format(
                tree.opensfm_reconstruction))

        # check if reconstruction was exported to bundler before
        if not io.file_exists(tree.opensfm_bundle_list) or rerun_cell:
            # convert back to bundler's format
            system.run(
                'PYTHONPATH={} {}/bin/export_bundler {}'.format(
                    context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING(
                'Found a valid Bundler file in: {}'.format(
                    tree.opensfm_reconstruction))

        # check if reconstruction was exported to pmvs before
        if not io.file_exists(tree.pmvs_visdat) or rerun_cell:
            # run PMVS converter
            system.run(
                'PYTHONPATH={} {}/bin/export_pmvs {} --output {}'.format(
                    context.pyopencv_path, context.opensfm_path, tree.opensfm,
                    tree.pmvs))
        else:
            log.ODM_WARNING('Found a valid CMVS file in: {}'.format(
                tree.pmvs_visdat))

        log.ODM_INFO('Running ODM Slam Cell - Finished')
        return ecto.OK if args.end_with != 'odm_slam' else ecto.QUIT
Example #45
0
    def process(self, inputs, outputs):
        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running MVE Cell')

        # get inputs
        tree = inputs.tree
        args = inputs.args
        reconstruction = inputs.reconstruction
        photos = reconstruction.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos array to start MVE')
            return ecto.QUIT

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'mve') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'mve' in args.rerun_from)

        # check if reconstruction was done before
        if not io.file_exists(tree.mve_model) or rerun_cell:
            # cleanup if a rerun
            if io.dir_exists(tree.mve_path) and rerun_cell:
                shutil.rmtree(tree.mve_path)

            # make bundle directory
            if not io.file_exists(tree.mve_bundle):
                system.mkdir_p(tree.mve_path)
                system.mkdir_p(io.join_paths(tree.mve_path, 'bundle'))
                io.copy(tree.opensfm_image_list, tree.mve_image_list)
                io.copy(tree.opensfm_bundle, tree.mve_bundle)

            # mve makescene wants the output directory
            # to not exist before executing it (otherwise it
            # will prompt the user for confirmation)
            if io.dir_exists(tree.mve):
                shutil.rmtree(tree.mve)

            # run mve makescene
            if not io.dir_exists(tree.mve_views):
                system.run('%s %s %s' % (context.makescene_path, tree.mve_path, tree.mve), env_vars={'OMP_NUM_THREADS': args.max_concurrency})

            # Compute mve output scale based on depthmap_resolution
            max_width = 0
            max_height = 0
            for photo in photos:
                max_width = max(photo.width, max_width)
                max_height = max(photo.height, max_height)

            max_pixels = args.depthmap_resolution * args.depthmap_resolution
            if max_width * max_height <= max_pixels:
                mve_output_scale = 0
            else:
                ratio = float(max_width * max_height) / float(max_pixels)
                mve_output_scale = int(math.ceil(math.log(ratio) / math.log(4.0)))

            dmrecon_config = [
                "-s%s" % mve_output_scale,
	            "--progress=silent",
                "--local-neighbors=2",
                "--force",
            ]

            # Run MVE's dmrecon
            log.ODM_INFO('                                                                               ')
            log.ODM_INFO('                                    ,*/**                                      ')
            log.ODM_INFO('                                  ,*@%*/@%*                                    ')
            log.ODM_INFO('                                ,/@%******@&*.                                 ')
            log.ODM_INFO('                              ,*@&*********/@&*                                ')
            log.ODM_INFO('                            ,*@&**************@&*                              ')
            log.ODM_INFO('                          ,/@&******************@&*.                           ')
            log.ODM_INFO('                        ,*@&*********************/@&*                          ')
            log.ODM_INFO('                      ,*@&**************************@&*.                       ')
            log.ODM_INFO('                    ,/@&******************************&&*,                     ')
            log.ODM_INFO('                  ,*&&**********************************@&*.                   ')
            log.ODM_INFO('                ,*@&**************************************@&*.                 ')
            log.ODM_INFO('              ,*@&***************#@@@@@@@@@%****************&&*,               ')
            log.ODM_INFO('            .*&&***************&@@@@@@@@@@@@@@****************@@*.             ')
            log.ODM_INFO('          .*@&***************&@@@@@@@@@@@@@@@@@%****(@@%********@@*.           ')
            log.ODM_INFO('        .*@@***************%@@@@@@@@@@@@@@@@@@@@@#****&@@@@%******&@*,         ')
            log.ODM_INFO('      .*&@****************@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@/*****@@*.       ')
            log.ODM_INFO('    .*@@****************@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@%*************@@*.     ')
            log.ODM_INFO('  .*@@****/***********@@@@@&**(@@@@@@@@@@@@@@@@@@@@@@@#*****************%@*,   ')
            log.ODM_INFO(' */@*******@*******#@@@@%*******/@@@@@@@@@@@@@@@@@@@@********************/@(,  ')
            log.ODM_INFO(' ,*@(********&@@@@@@#**************/@@@@@@@#**(@@&/**********************@&*   ')
            log.ODM_INFO('   *#@/*******************************@@@@@***&@&**********************&@*,    ')
            log.ODM_INFO('     *#@#******************************&@@@***@#*********************&@*,      ')
            log.ODM_INFO('       */@#*****************************@@@************************@@*.        ')
            log.ODM_INFO('         *#@/***************************/@@/*********************%@*,          ')
            log.ODM_INFO('           *#@#**************************#@@%******************%@*,            ')
            log.ODM_INFO('             */@#*************************(@@@@@@@&%/********&@*.              ')
            log.ODM_INFO('               *(@(*********************************/%@@%**%@*,                ')
            log.ODM_INFO('                 *(@%************************************%@**                  ')
            log.ODM_INFO('                   **@%********************************&@*,                    ')
            log.ODM_INFO('                     *(@(****************************%@/*                      ')
            log.ODM_INFO('                       ,(@%************************#@/*                        ')
            log.ODM_INFO('                         ,*@%********************&@/,                          ')
            log.ODM_INFO('                           */@#****************#@/*                            ')
            log.ODM_INFO('                             ,/@&************#@/*                              ')
            log.ODM_INFO('                               ,*@&********%@/,                                ')
            log.ODM_INFO('                                 */@#****(@/*                                  ')
            log.ODM_INFO('                                   ,/@@@@(*                                    ')
            log.ODM_INFO('                                     .**,                                      ')
            log.ODM_INFO('')
            log.ODM_INFO("Running dense reconstruction. This might take a while. Please be patient, the process is not dead or hung.")
            log.ODM_INFO("                              Process is running")
            system.run('%s %s %s' % (context.dmrecon_path, ' '.join(dmrecon_config), tree.mve), env_vars={'OMP_NUM_THREADS': args.max_concurrency})

            scene2pset_config = [
                "-F%s" % mve_output_scale
            ]

            # run scene2pset
            system.run('%s %s "%s" "%s"' % (context.scene2pset_path, ' '.join(scene2pset_config), tree.mve, tree.mve_model), env_vars={'OMP_NUM_THREADS': args.max_concurrency})
        else:
            log.ODM_WARNING('Found a valid MVE reconstruction file in: %s' %
                            tree.mve_model)

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'MVE')

        log.ODM_INFO('Running ODM MVE Cell - Finished')
        return ecto.OK if args.end_with != 'mve' else ecto.QUIT
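The output scale computed in this cell keeps full resolution (scale 0) when the largest image fits within depthmap_resolution squared pixels, and otherwise takes the ceiling of a base-4 logarithm, since each scale step quarters the pixel count; a hedged sketch of just that formula (the function name is illustrative):

import math

def mve_scale_for(max_width, max_height, depthmap_resolution):
    # Scale 0 keeps full resolution; each +1 quarters the pixel count.
    max_pixels = depthmap_resolution * depthmap_resolution
    if max_width * max_height <= max_pixels:
        return 0
    ratio = float(max_width * max_height) / float(max_pixels)
    return int(math.ceil(math.log(ratio) / math.log(4.0)))

# mve_scale_for(4000, 3000, 640) --> 3  (12e6 / 409600 is about 29.3; log base 4 is about 2.44)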
Example #46
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running MVS Texturing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'mvs_texturing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'mvs_texturing' in args.rerun_from)

        if not io.file_exists(tree.odm_textured_model_obj) or rerun_cell:
            log.ODM_DEBUG('Writing MVS Textured file in: %s'
                          % tree.odm_textured_model_obj)
            
            
            # Format arguments to fit Mvs-Texturing app
            skipGeometricVisibilityTest = ""
            skipGlobalSeamLeveling = ""
            skipLocalSeamLeveling = ""
            skipHoleFilling = ""
            keepUnseenFaces = ""
            
            if (self.params.skip_vis_test):
                skipGeometricVisibilityTest = "--skip_geometric_visibility_test"
            if (self.params.skip_glob_seam_leveling):
                skipGlobalSeamLeveling = "--skip_global_seam_leveling"
            if (self.params.skip_loc_seam_leveling):
                skipLocalSeamLeveling = "--skip_local_seam_leveling"
            if (self.params.skip_hole_fill):
                skipHoleFilling = "--skip_hole_filling"
            if (self.params.keep_unseen_faces):
                keepUnseenFaces = "--keep_unseen_faces"

            # mvstex definitions
            kwargs = {
                'bin': context.mvstex_path,
                'out_dir': io.join_paths(tree.odm_texturing, "odm_textured_model"),
                'pmvs_folder': tree.pmvs_rec_path,
                'nvm_file': io.join_paths(tree.pmvs_rec_path, "nvmCams.nvm"),
                'model': tree.odm_mesh,
                'dataTerm': self.params.data_term,
                'outlierRemovalType': self.params.outlier_rem_type,
                'skipGeometricVisibilityTest': skipGeometricVisibilityTest,
                'skipGlobalSeamLeveling': skipGlobalSeamLeveling,
                'skipLocalSeamLeveling': skipLocalSeamLeveling,
                'skipHoleFilling': skipHoleFilling,
                'keepUnseenFaces': keepUnseenFaces,
                'toneMapping': self.params.tone_mapping
            }

            if not args.use_pmvs:
                kwargs['nvm_file'] = io.join_paths(tree.opensfm,
                                                   "reconstruction.nvm")
            else:
                log.ODM_DEBUG('Generating .nvm file from pmvs output: %s'
                              % '{nvm_file}'.format(**kwargs))

                # Create .nvm camera file.
                pmvs2nvmcams.run('{pmvs_folder}'.format(**kwargs),
                                 '{nvm_file}'.format(**kwargs))

            # run texturing binary
            system.run('{bin} {nvm_file} {model} {out_dir} '
                       '-d {dataTerm} -o {outlierRemovalType} '
                       '-t {toneMapping} '
                       '{skipGeometricVisibilityTest} '
                       '{skipGlobalSeamLeveling} '
                       '{skipLocalSeamLeveling} '
                       '{skipHoleFilling} '
                       '{keepUnseenFaces}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Texture file in: %s'
                            % tree.odm_textured_model_obj)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Texturing')

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_texturing' else ecto.QUIT
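The block of if-statements above only maps boolean texturing params onto mvstex command-line switches; a compact, purely illustrative equivalent (ODM itself keeps the explicit version):

def mvstex_flags(params):
    # Map boolean texturing params to their corresponding mvstex switches.
    mapping = [
        (params.skip_vis_test, "--skip_geometric_visibility_test"),
        (params.skip_glob_seam_leveling, "--skip_global_seam_leveling"),
        (params.skip_loc_seam_leveling, "--skip_local_seam_leveling"),
        (params.skip_hole_fill, "--skip_hole_filling"),
        (params.keep_unseen_faces, "--keep_unseen_faces"),
    ]
    return [flag for enabled, flag in mapping if enabled]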
Example #47
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM OrthoPhoto Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_orthophoto)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_orthophoto') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_orthophoto' in args.rerun_from)

        if not io.file_exists(tree.odm_orthophoto_file) or rerun_cell:

            # odm_orthophoto definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'model_geo': tree.odm_georeferencing_model_obj_geo,
                'log': tree.odm_orthophoto_log,
                'ortho': tree.odm_orthophoto_file,
                'corners': tree.odm_orthophoto_corners,
                'res': self.params.resolution
            }

            # run odm_orthophoto
            system.run('{bin}/odm_orthophoto -inputFile {model_geo} '
                       '-logFile {log} -outputFile {ortho} -resolution {res} '
                       '-outputCornerFile {corners}'.format(**kwargs))

            # Create georeferenced GeoTiff
            geotiffcreated = False
            georef = types.ODM_GeoRef()
            # creates the coord refs  # TODO: avoid doing this twice (also done after odm_georef)
            georef.parse_coordinate_system(tree.odm_georeferencing_coords)

            if georef.epsg and georef.utm_east_offset and georef.utm_north_offset:
                ulx = uly = lrx = lry = 0.0
                with open(tree.odm_orthophoto_corners) as f:
                    for lineNumber, line in enumerate(f):
                        if lineNumber == 0:
                            tokens = line.split(' ')
                            if len(tokens) == 4:
                                ulx = float(tokens[0]) + \
                                    float(georef.utm_east_offset)
                                lry = float(tokens[1]) + \
                                    float(georef.utm_north_offset)
                                lrx = float(tokens[2]) + \
                                    float(georef.utm_east_offset)
                                uly = float(tokens[3]) + \
                                    float(georef.utm_north_offset)
                log.ODM_INFO('Creating GeoTIFF')

                kwargs = {
                    'ulx': ulx,
                    'uly': uly,
                    'lrx': lrx,
                    'lry': lry,
                    'epsg': georef.epsg,
                    'png': tree.odm_orthophoto_file,
                    'tiff': tree.odm_orthophoto_tif,
                    'log': tree.odm_orthophoto_tif_log
                }

                system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} '
                           '-a_srs \"EPSG:{epsg}\" {png} {tiff} > {log}'.format(**kwargs))
                geotiffcreated = True
            if not geotiffcreated:
                log.ODM_WARNING('No geo-referenced orthophoto created due '
                                'to missing geo-referencing or corner coordinates.')

        else:
            log.ODM_WARNING('Found a valid orthophoto in: %s' % tree.odm_orthophoto_file)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Orthophoto')

        log.ODM_INFO('Running ODM OrthoPhoto Cell - Finished')
        return ecto.OK if args.end_with != 'odm_orthophoto' else ecto.QUIT
Example #48
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Orthophoto Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        reconstruction = inputs.reconstruction
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_orthophoto)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_orthophoto') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_orthophoto' in args.rerun_from)

        if not io.file_exists(tree.odm_orthophoto_file) or rerun_cell:

            # odm_orthophoto definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'log': tree.odm_orthophoto_log,
                'ortho': tree.odm_orthophoto_file,
                'corners': tree.odm_orthophoto_corners,
                'res': 1.0 / (gsd.cap_resolution(self.params.resolution, tree.opensfm_reconstruction, ignore_gsd=args.ignore_gsd) / 100.0),
                'verbose': verbose
            }

            # Have geo coordinates?
            georef = reconstruction.georef

            # Check if the georef object is initialized
            # (during a --rerun this might not be)
            # TODO: we should move this to a more central
            # location (perhaps during the dataset initialization)
            if georef and not georef.utm_east_offset:
                georeferencing_dir = tree.odm_georeferencing if args.use_3dmesh and not args.skip_3dmodel else tree.odm_25dgeoreferencing
                odm_georeferencing_model_txt_geo_file = os.path.join(georeferencing_dir, tree.odm_georeferencing_model_txt_geo)

                if io.file_exists(odm_georeferencing_model_txt_geo_file):
                    georef.extract_offsets(odm_georeferencing_model_txt_geo_file)
                else:
                    log.ODM_WARNING('Cannot read UTM offset from {}. An orthophoto will not be generated.'.format(odm_georeferencing_model_txt_geo_file))


            if georef:
                if args.use_3dmesh:
                    kwargs['model_geo'] = os.path.join(tree.odm_texturing, tree.odm_georeferencing_model_obj_geo)
                else:
                    kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_georeferencing_model_obj_geo)
            else:
                if args.use_3dmesh:
                    kwargs['model_geo'] = os.path.join(tree.odm_texturing, tree.odm_textured_model_obj)
                else:
                    kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj)

            # run odm_orthophoto
            system.run('{bin}/odm_orthophoto -inputFile {model_geo} '
                       '-logFile {log} -outputFile {ortho} -resolution {res} {verbose} '
                       '-outputCornerFile {corners}'.format(**kwargs))

            # Create georeferenced GeoTiff
            geotiffcreated = False

            if georef and georef.projection and georef.utm_east_offset and georef.utm_north_offset:
                ulx = uly = lrx = lry = 0.0
                with open(tree.odm_orthophoto_corners) as f:
                    for lineNumber, line in enumerate(f):
                        if lineNumber == 0:
                            tokens = line.split(' ')
                            if len(tokens) == 4:
                                ulx = float(tokens[0]) + \
                                    float(georef.utm_east_offset)
                                lry = float(tokens[1]) + \
                                    float(georef.utm_north_offset)
                                lrx = float(tokens[2]) + \
                                    float(georef.utm_east_offset)
                                uly = float(tokens[3]) + \
                                    float(georef.utm_north_offset)
                log.ODM_INFO('Creating GeoTIFF')

                kwargs = {
                    'ulx': ulx,
                    'uly': uly,
                    'lrx': lrx,
                    'lry': lry,
                    'tiled': '' if self.params.no_tiled else '-co TILED=yes ',
                    'compress': self.params.compress,
                    'predictor': '-co PREDICTOR=2 ' if self.params.compress in
                                                       ['LZW', 'DEFLATE'] else '',
                    'proj': georef.projection.srs,
                    'bigtiff': self.params.bigtiff,
                    'png': tree.odm_orthophoto_file,
                    'tiff': tree.odm_orthophoto_tif,
                    'log': tree.odm_orthophoto_tif_log,
                    'max_memory': get_max_memory(),
                    'threads': self.params.max_concurrency
                }

                system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} '
                           '{tiled} '
                           '-co BIGTIFF={bigtiff} '
                           '-co COMPRESS={compress} '
                           '{predictor} '
                           '-co BLOCKXSIZE=512 '
                           '-co BLOCKYSIZE=512 '
                           '-co NUM_THREADS={threads} '
                           '-a_srs \"{proj}\" '
                           '--config GDAL_CACHEMAX {max_memory}% '
                           '{png} {tiff} > {log}'.format(**kwargs))

                if args.crop > 0:
                    shapefile_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.shp')
                    Cropper.crop(shapefile_path, tree.odm_orthophoto_tif, {
                            'TILED': 'NO' if self.params.no_tiled else 'YES',
                            'COMPRESS': self.params.compress,
                            'PREDICTOR': '2' if self.params.compress in ['LZW', 'DEFLATE'] else '1',
                            'BIGTIFF': self.params.bigtiff,
                            'BLOCKXSIZE': 512,
                            'BLOCKYSIZE': 512,
                            'NUM_THREADS': self.params.max_concurrency
                        })

                if self.params.build_overviews:
                    log.ODM_DEBUG("Building Overviews")
                    kwargs = {
                        'orthophoto': tree.odm_orthophoto_tif,
                        'log': tree.odm_orthophoto_gdaladdo_log
                    }
                    # Run gdaladdo
                    system.run('gdaladdo -ro -r average '
                               '--config BIGTIFF_OVERVIEW IF_SAFER '
                               '--config COMPRESS_OVERVIEW JPEG '
                               '{orthophoto} 2 4 8 16 > {log}'.format(**kwargs))

                geotiffcreated = True
            if not geotiffcreated:
                log.ODM_WARNING('No geo-referenced orthophoto created due '
                                'to missing geo-referencing or corner coordinates.')

        else:
            log.ODM_WARNING('Found a valid orthophoto in: %s' % tree.odm_orthophoto_file)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Orthophoto')

        log.ODM_INFO('Running ODM OrthoPhoto Cell - Finished')
        return ecto.OK if args.end_with != 'odm_orthophoto' else ecto.QUIT
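The 'res' value in this cell appears to convert a ground sampling distance into an orthophoto resolution in pixels per metre, assuming gsd.cap_resolution returns centimetres per pixel (an assumption, not confirmed by this excerpt); a minimal sketch of that arithmetic:

def pixels_per_meter(gsd_cm_per_pixel):
    # Assumption: GSD is in cm/pixel, e.g. 5 cm/pixel -> 0.05 m/pixel -> 20 px/m.
    return 1.0 / (gsd_cm_per_pixel / 100.0)

# pixels_per_meter(5.0) --> 20.0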
Example #49
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Texturing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_texturing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_texturing' in args.rerun_from)

        # Undistort radial distortion
        if not os.path.isdir(tree.odm_texturing_undistorted_image_path) or rerun_cell:
            system.run(' '.join([
                'cd {} &&'.format(tree.opensfm),
                'PYTHONPATH={}:{}'.format(context.pyopencv_path,
                                          context.opensfm_path),
                'python',
                os.path.join(context.odm_modules_src_path,
                             'odm_slam/src/undistort_radial.py'),
                '--output',
                tree.odm_texturing_undistorted_image_path,
                tree.opensfm,
            ]))

            system.run(
                'PYTHONPATH=%s %s/bin/export_bundler %s' %
                (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING(
                'Found a valid Bundler file in: %s' %
                (tree.opensfm_reconstruction))

        if not io.file_exists(tree.odm_textured_model_obj) or rerun_cell:
            log.ODM_DEBUG('Writing ODM Textured file in: %s'
                          % tree.odm_textured_model_obj)

            # odm_texturing definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'out_dir': tree.odm_texturing,
                'bundle': tree.opensfm_bundle,
                'imgs_path': tree.odm_texturing_undistorted_image_path,
                'imgs_list': tree.opensfm_bundle_list,
                'model': tree.odm_mesh,
                'log': tree.odm_texuring_log,
                'resize': self.params.resize,
                'resolution': self.params.resolution,
                'size': self.params.size,
                'verbose': verbose
            }

            # run texturing binary
            system.run('{bin}/odm_texturing -bundleFile {bundle} '
                       '-imagesPath {imgs_path} -imagesListPath {imgs_list} '
                       '-inputModelPath {model} -outputFolder {out_dir}/ '
                       '-textureResolution {resolution} -bundleResizedTo {resize} {verbose} '
                       '-textureWithSize {size} -logFile {log}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Texture file in: %s'
                            % tree.odm_textured_model_obj)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Texturing')

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_texturing' else ecto.QUIT
Example #50
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running MVS Texturing Cell')

        # get inputs
        args = inputs.args
        tree = inputs.tree
        reconstruction = inputs.reconstruction

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)
        if not args.use_3dmesh: system.mkdir_p(tree.odm_25dtexturing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'mvs_texturing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'mvs_texturing' in args.rerun_from)

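        # Each run below textures one mesh into its own output directory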
        runs = [{
            'out_dir': tree.odm_texturing,
            'model': tree.odm_mesh,
            'nadir': False
        }]

        if args.skip_3dmodel:
            runs = []

        if not args.use_3dmesh:
            runs += [{
                    'out_dir': tree.odm_25dtexturing,
                    'model': tree.odm_25dmesh,
                    'nadir': True
                }]

        for r in runs:
            odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj)

            if not io.file_exists(odm_textured_model_obj) or rerun_cell:
                log.ODM_DEBUG('Writing MVS Textured file in: %s'
                              % odm_textured_model_obj)

                # Format arguments to fit Mvs-Texturing app
                skipGeometricVisibilityTest = ""
                skipGlobalSeamLeveling = ""
                skipLocalSeamLeveling = ""
                skipHoleFilling = ""
                keepUnseenFaces = ""
                nadir = ""

                if (self.params.skip_vis_test):
                    skipGeometricVisibilityTest = "--skip_geometric_visibility_test"
                if (self.params.skip_glob_seam_leveling):
                    skipGlobalSeamLeveling = "--skip_global_seam_leveling"
                if (self.params.skip_loc_seam_leveling):
                    skipLocalSeamLeveling = "--skip_local_seam_leveling"
                if (self.params.skip_hole_fill):
                    skipHoleFilling = "--skip_hole_filling"
                if (self.params.keep_unseen_faces):
                    keepUnseenFaces = "--keep_unseen_faces"
                if (r['nadir']):
                    nadir = '--nadir_mode'

                # mvstex definitions
                kwargs = {
                    'bin': context.mvstex_path,
                    'out_dir': io.join_paths(r['out_dir'], "odm_textured_model"),
                    'model': r['model'],
                    'dataTerm': self.params.data_term,
                    'outlierRemovalType': self.params.outlier_rem_type,
                    'skipGeometricVisibilityTest': skipGeometricVisibilityTest,
                    'skipGlobalSeamLeveling': skipGlobalSeamLeveling,
                    'skipLocalSeamLeveling': skipLocalSeamLeveling,
                    'skipHoleFilling': skipHoleFilling,
                    'keepUnseenFaces': keepUnseenFaces,
                    'toneMapping': self.params.tone_mapping,
                    'nadirMode': nadir,
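                    # map args.texturing_nadir_weight onto an exponential scale (2^w - 1)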
                    'nadirWeight': 2 ** args.texturing_nadir_weight - 1,
                    'nvm_file': io.join_paths(tree.opensfm, "reconstruction.nvm")
                }

                # Make sure tmp directory is empty
                mvs_tmp_dir = os.path.join(r['out_dir'], 'tmp')
                if io.dir_exists(mvs_tmp_dir):
                    log.ODM_INFO("Removing old tmp directory {}".format(mvs_tmp_dir))
                    shutil.rmtree(mvs_tmp_dir)

                # run texturing binary
                system.run('{bin} {nvm_file} {model} {out_dir} '
                           '-d {dataTerm} -o {outlierRemovalType} '
                           '-t {toneMapping} '
                           '{skipGeometricVisibilityTest} '
                           '{skipGlobalSeamLeveling} '
                           '{skipLocalSeamLeveling} '
                           '{skipHoleFilling} '
                           '{keepUnseenFaces} '
                           '{nadirMode} '
                           '-n {nadirWeight}'.format(**kwargs))
            else:
                log.ODM_WARNING('Found a valid ODM Texture file in: %s'
                                % odm_textured_model_obj)

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Texturing')

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args.end_with != 'mvs_texturing' else ecto.QUIT
Example #51
0
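    # Paths used to merge per-submodel outputs back into a single dataset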
    submodels_path = io.join_paths(args.dataset, 'submodels')
    sfm_path = io.join_paths(args.dataset, 'opensfm')
    meta_data = metadataset.MetaDataSet(sfm_path)
    data = metadataset.DataSet(sfm_path)
    voronoi_file = io.join_paths(meta_data.data_path, 'voronoi.geojson')
    proj_path = io.join_paths(args.dataset, "odm_georeferencing/proj.txt")
    out_tif = io.join_paths(args.dataset, "merged.tif")
    addo_log = io.join_paths(args.dataset, "gdal_addo.log")

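    # Collect each submodel's georeferenced bounds file, keyed by submodel number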
    bounds_files = {}
    for folder in os.listdir(submodels_path):
        if 'submodel' in folder:
            folder_number = '0' if folder.split('_')[1] == '0000' else folder.split('_')[1].lstrip('0')
            bounds_file = io.join_paths(submodels_path, folder +
                                        "/odm_georeferencing/odm_georeferenced_model.bounds.geojson")
            if io.file_exists(bounds_file):
                bounds_files[folder_number] = bounds_file

    # Do Voronoi calculations
    # load clusters
    images, positions, labels, centers = meta_data.load_clusters()
    cluster_proj = pyproj.Proj(init='epsg:4326')
    with open(proj_path, 'r') as fr:
        transform_proj = pyproj.Proj(fr.read())

    # build a transformation from the WGS84 cluster coordinates into the dataset's projection
    project = partial(
        pyproj.transform,
        cluster_proj,
        transform_proj)
Example #52
0
    def process(self, inputs, outputs):

        # find a file in the root directory
        def find(file, dir):
            for root, dirs, files in os.walk(dir):
                return '/'.join((root, file)) if file in files else None

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Georeferencing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        gcpfile = io.join_paths(tree.root_path, self.params.gcp_file) \
            if self.params.gcp_file else find('gcp_list.txt', tree.root_path)
        geocreated = True
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_georeferencing)

        # In case a GCP file is not provided, try to generate the coordinates
        # file from the images' EXIF metadata. Internally calls jhead.
        log.ODM_DEBUG(self.params.gcp_file)
        if not self.params.gcp_file: # and \
        #   not io.file_exists(tree.odm_georeferencing_coords):
            
            log.ODM_WARNING('No coordinates file. '
                            'Generating coordinates file: %s'
                            % tree.odm_georeferencing_coords)

            # odm_georeference definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'imgs': tree.dataset_resize,
                'imgs_list': tree.opensfm_bundle_list,
                'coords': tree.odm_georeferencing_coords,
                'log': tree.odm_georeferencing_utm_log,
                'verbose': verbose
            }

            # run UTM extraction binary
            extract_utm = system.run_and_return('{bin}/odm_extract_utm -imagesPath {imgs}/ '
                       '-imageListFile {imgs_list} -outputCoordFile {coords} {verbose} '
                       '-logFile {log}'.format(**kwargs))

            if extract_utm != '':
                log.ODM_WARNING('Could not generate coordinates file. '
                                'Ignore if there is a GCP file. Error: %s'
                                % extract_utm)


        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_georeferencing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_georeferencing' in args.rerun_from)

        if not io.file_exists(tree.odm_georeferencing_model_obj_geo) or \
           not io.file_exists(tree.odm_georeferencing_model_ply_geo) or rerun_cell:

            # odm_georeference definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'bundle': tree.opensfm_bundle,
                'imgs': tree.dataset_resize,
                'imgs_list': tree.opensfm_bundle_list,
                'model': tree.odm_textured_model_obj,
                'log': tree.odm_georeferencing_log,
                'coords': tree.odm_georeferencing_coords,
                'pc_geo': tree.odm_georeferencing_model_ply_geo,
                'geo_sys': tree.odm_georeferencing_model_txt_geo,
                'model_geo': tree.odm_georeferencing_model_obj_geo,
                'size': self.params.img_size,
                'gcp': gcpfile,
                'verbose': verbose
            }
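            # Choose which dense point cloud to georeference: OpenSfM's dense model or the PMVS output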
            if args.use_opensfm_pointcloud:
                kwargs['pc'] = tree.opensfm_model
            else:
                kwargs['pc'] = tree.pmvs_model

            # Check to see if the GCP file exists

            if not self.params.use_exif and (self.params.gcp_file or find('gcp_list.txt', tree.root_path)):
                log.ODM_INFO('Found %s' % gcpfile)
                try:
                    system.run('{bin}/odm_georef -bundleFile {bundle} -imagesPath {imgs} -imagesListPath {imgs_list} '
                               '-bundleResizedTo {size} -inputFile {model} -outputFile {model_geo} '
                               '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} {verbose} '
                               '-logFile {log} -georefFileOutputPath {geo_sys} -gcpFile {gcp} '
                               '-outputCoordFile {coords}'.format(**kwargs))
                except Exception:
                    log.ODM_EXCEPTION('Georeferencing failed. ')
                    return ecto.QUIT
            elif io.file_exists(tree.odm_georeferencing_coords):
                log.ODM_INFO('Running georeferencing with generated coords file.')
                system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} '
                           '-inputFile {model} -outputFile {model_geo} '
                           '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} {verbose} '
                           '-logFile {log} -georefFileOutputPath {geo_sys}'.format(**kwargs))
            else:
                log.ODM_WARNING('Georeferencing failed. Make sure your '
                                'photos have geotags in the EXIF or you have '
                                'provided a GCP file. ')
                geocreated = False # skip the rest of the georeferencing

            if geocreated:
                # update images metadata
                geo_ref = types.ODM_GeoRef()
                geo_ref.parse_coordinate_system(tree.odm_georeferencing_coords)

                for idx, photo in enumerate(self.inputs.photos):
                    geo_ref.utm_to_latlon(tree.odm_georeferencing_latlon, photo, idx)

                # convert ply model to LAS reference system
                geo_ref.convert_to_las(tree.odm_georeferencing_model_ply_geo,
                                       tree.odm_georeferencing_pdal)

                # XYZ point cloud output
                log.ODM_INFO("Creating geo-referenced CSV file (XYZ format, can be used with GRASS to create DEM)")
                with open(tree.odm_georeferencing_xyz_file, "wb") as csvfile:
                    csvfile_writer = csv.writer(csvfile, delimiter=",")
                    reachedpoints = False
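                    # skip the PLY header, then write each vertex as X,Y,Z with the UTM offsets added back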
                    with open(tree.odm_georeferencing_model_ply_geo) as f:
                        for lineNumber, line in enumerate(f):
                            if reachedpoints:
                                tokens = line.split(" ")
                                csv_line = [float(tokens[0])+geo_ref.utm_east_offset,
                                            float(tokens[1])+geo_ref.utm_north_offset,
                                            tokens[2]]
                                csvfile_writer.writerow(csv_line)
                            if line.startswith("end_header"):
                                reachedpoints = True

        else:
            log.ODM_WARNING('Found a valid georeferenced model in: %s'
                            % tree.odm_georeferencing_model_ply_geo)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Georeferencing')

        log.ODM_INFO('Running ODM Georeferencing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_georeferencing' else ecto.QUIT
Example #53
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM OpenSfM Cell')

        # get inputs
        tree = inputs.tree
        args = inputs.args
        reconstruction = inputs.reconstruction
        photos = reconstruction.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos array to start OpenSfM')
            return ecto.QUIT

        # create working directories
        system.mkdir_p(tree.opensfm)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'opensfm') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'opensfm' in args.rerun_from)

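        # the expected output depends on the pipeline variant (fast orthophoto, OpenSfM dense, or sparse reconstruction)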
        if args.fast_orthophoto:
            output_file = io.join_paths(tree.opensfm, 'reconstruction.ply')
        elif args.use_opensfm_dense:
            output_file = tree.opensfm_model
        else:
            output_file = tree.opensfm_reconstruction

        # check if reconstruction was done before
        if not io.file_exists(output_file) or rerun_cell:
            # create file list
            list_path = io.join_paths(tree.opensfm, 'image_list.txt')
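            # note whether every photo carries an altitude value; this drives the GPS alignment settings below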
            has_alt = True
            with open(list_path, 'w') as fout:
                for photo in photos:
                    if not photo.altitude:
                        has_alt = False
                    fout.write('%s\n' % io.join_paths(tree.dataset_raw, photo.filename))

            # create config file for OpenSfM
            config = [
                "use_exif_size: %s" % ('no' if not self.params.use_exif_size else 'yes'),
                "feature_process_size: %s" % self.params.feature_process_size,
                "feature_min_frames: %s" % self.params.feature_min_frames,
                "processes: %s" % self.params.processes,
                "matching_gps_neighbors: %s" % self.params.matching_gps_neighbors,
                "depthmap_method: %s" % args.opensfm_depthmap_method,
                "depthmap_resolution: %s" % args.depthmap_resolution,
                "depthmap_min_patch_sd: %s" % args.opensfm_depthmap_min_patch_sd,
                "depthmap_min_consistent_views: %s" % args.opensfm_depthmap_min_consistent_views,
                "optimize_camera_parameters: %s" % ('no' if self.params.fixed_camera_params else 'yes')
            ]

            if has_alt:
                log.ODM_DEBUG("Altitude data detected, enabling it for GPS alignment")
                config.append("use_altitude_tag: yes")
                config.append("align_method: naive")
            else:
                config.append("align_method: orientation_prior")
                config.append("align_orientation_prior: vertical")

            if args.use_hybrid_bundle_adjustment:
                log.ODM_DEBUG("Enabling hybrid bundle adjustment")
                config.append("bundle_interval: 100")          # Bundle after adding 'bundle_interval' cameras
                config.append("bundle_new_points_ratio: 1.2")  # Bundle when (new points) / (bundled points) > bundle_new_points_ratio
                config.append("local_bundle_radius: 1")        # Max image graph distance for images to be included in local bundle adjustment

            if args.matcher_distance > 0:
                config.append("matching_gps_distance: %s" % self.params.matching_gps_distance)

            if tree.odm_georeferencing_gcp:
                config.append("bundle_use_gcp: yes")
                io.copy(tree.odm_georeferencing_gcp, tree.opensfm)

            # write config file
            log.ODM_DEBUG(config)
            config_filename = io.join_paths(tree.opensfm, 'config.yaml')
            with open(config_filename, 'w') as fout:
                fout.write("\n".join(config))

            # run OpenSfM reconstruction
            matched_done_file = io.join_paths(tree.opensfm, 'matching_done.txt')
            if not io.file_exists(matched_done_file) or rerun_cell:
                system.run('PYTHONPATH=%s %s/bin/opensfm extract_metadata %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
                system.run('PYTHONPATH=%s %s/bin/opensfm detect_features %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
                system.run('PYTHONPATH=%s %s/bin/opensfm match_features %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
                with open(matched_done_file, 'w') as fout:
                    fout.write("Matching done!\n")
            else:
                log.ODM_WARNING('Found a feature matching done progress file in: %s' %
                                matched_done_file)

            if not io.file_exists(tree.opensfm_tracks) or rerun_cell:
                system.run('PYTHONPATH=%s %s/bin/opensfm create_tracks %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
            else:
                log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' %
                                tree.opensfm_tracks)

            if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell:
                system.run('PYTHONPATH=%s %s/bin/opensfm reconstruct %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
            else:
                log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' %
                                tree.opensfm_reconstruction)

            # Check that a reconstruction file has been created
            if not io.file_exists(tree.opensfm_reconstruction):
                log.ODM_ERROR("The program could not process this dataset using the current settings. "
                                "Check that the images have enough overlap, "
                                "that there are enough recognizable features "
                                "and that the images are in focus. "
                                "You could also try to increase the --min-num-features parameter."
                                "The program will now exit.")
                sys.exit(1)


            # Always export VisualSFM's reconstruction and undistort images
            # as we'll use these for texturing (after GSD estimation and resizing)
            if not args.ignore_gsd:
                image_scale = gsd.image_scale_factor(args.orthophoto_resolution, tree.opensfm_reconstruction)
            else:
                image_scale = 1.0

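            # Export the reconstruction in NVM (VisualSFM) format; later texturing reads camera poses from it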
            if not io.file_exists(tree.opensfm_reconstruction_nvm) or rerun_cell:
                system.run('PYTHONPATH=%s %s/bin/opensfm export_visualsfm --image_extension png --scale_focal %s %s' %
                            (context.pyopencv_path, context.opensfm_path, image_scale, tree.opensfm))
            else:
                log.ODM_WARNING('Found a valid OpenSfM NVM reconstruction file in: %s' %
                                tree.opensfm_reconstruction_nvm)

            # These will be used for texturing
            system.run('PYTHONPATH=%s %s/bin/opensfm undistort --image_format png --image_scale %s %s' %
                        (context.pyopencv_path, context.opensfm_path, image_scale, tree.opensfm))

            # Skip dense reconstruction if necessary and export
            # sparse reconstruction instead
            if args.fast_orthophoto:
                system.run('PYTHONPATH=%s %s/bin/opensfm export_ply --no-cameras %s' %
                        (context.pyopencv_path, context.opensfm_path, tree.opensfm))
            elif args.use_opensfm_dense:
                # Undistort images at full scale in JPG
                # (TODO: we could compare the size of the PNGs if they are < than depthmap_resolution
                # and use those instead of re-exporting full resolution JPGs)
                system.run('PYTHONPATH=%s %s/bin/opensfm undistort %s' %
                        (context.pyopencv_path, context.opensfm_path, tree.opensfm))
                system.run('PYTHONPATH=%s %s/bin/opensfm compute_depthmaps %s' %
                        (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' %
                            tree.opensfm_reconstruction)

        # check if reconstruction was exported to bundler before
        if not io.file_exists(tree.opensfm_bundle_list) or rerun_cell:
            # convert back to bundler's format
            system.run('PYTHONPATH=%s %s/bin/export_bundler %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid Bundler file in: %s' %
                            tree.opensfm_bundle_list)

        if reconstruction.georef:
            system.run('PYTHONPATH=%s %s/bin/opensfm export_geocoords %s --transformation --proj \'%s\'' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm, reconstruction.georef.projection.srs))

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'OpenSfM')

        log.ODM_INFO('Running ODM OpenSfM Cell - Finished')
        return ecto.OK if args.end_with != 'opensfm' else ecto.QUIT
Example #54
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Resize Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        photos = self.inputs.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos array to resize')
            return ecto.QUIT

        if self.params.resize_to <= 0:
            log.ODM_ERROR('Resize parameter must be greater than 0')
            return ecto.QUIT

        # create working directory
        system.mkdir_p(tree.dataset_resize)

        log.ODM_DEBUG('Resizing dataset to: %s' % tree.dataset_resize)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'resize') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'resize' in args.rerun_from)

        # loop over photos
        for photo in photos:
            # define image paths
            path_file = photo.path_file
            new_path_file = io.join_paths(tree.dataset_resize, photo.filename)
            # set raw image path in case we want to rerun cell
            if io.file_exists(new_path_file) and rerun_cell:
                path_file = io.join_paths(tree.dataset_raw, photo.filename)

            if not io.file_exists(new_path_file) or rerun_cell:
                # open and resize image with opencv
                img = cv2.imread(path_file)
                # compute new size
                max_side = max(img.shape[0], img.shape[1])
                if max_side <= self.params.resize_to:
                    log.ODM_WARNING('Resize parameter is greater than or equal to the largest side of the image')
                ratio = float(self.params.resize_to) / float(max_side)
                img_r = cv2.resize(img, None, fx=ratio, fy=ratio)
                # write image with opencv
                cv2.imwrite(new_path_file, img_r)
                # read metadata with pyexiv2
                old_meta = pyexiv2.ImageMetadata(path_file)
                new_meta = pyexiv2.ImageMetadata(new_path_file)
                old_meta.read()
                new_meta.read()
                # copy metadata
                old_meta.copy(new_meta)
                # update metadata size (img_r.shape is (height, width, channels))
                new_meta['Exif.Photo.PixelXDimension'] = img_r.shape[1]
                new_meta['Exif.Photo.PixelYDimension'] = img_r.shape[0]
                new_meta.write()
                # update photos array with new values
                photo.path_file = new_path_file
                photo.width = img_r.shape[1]
                photo.height = img_r.shape[0]
                photo.update_focal()

                # log message
                log.ODM_DEBUG('Resized %s | dimensions: %s' %
                              (photo.filename, img_r.shape))
            else:
                # log message
                log.ODM_WARNING('Already resized %s | dimensions: %s x %s' %
                                (photo.filename, photo.width, photo.height))

        log.ODM_INFO('Resized %s images' % len(photos))

        # append photos to cell output
        self.outputs.photos = photos

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Resizing')

        log.ODM_INFO('Running ODM Resize Cell - Finished')
        return ecto.OK if args.end_with != 'resize' else ecto.QUIT