Example #1
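A unit test for GCPFile.make_filtered_copy(): the source file holds two entries, and the copy filtered against tests/assets/images keeps only one of them.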
    def test_filtered_copy(self):
        gcp = GCPFile('tests/assets/gcp_latlon_valid.txt')
        self.assertTrue(gcp.exists())
        self.assertEqual(gcp.entries_count(), 2)
        copy = GCPFile(gcp.make_filtered_copy('tests/assets/output/filtered_copy.txt', 'tests/assets/images', min_images=1))
        self.assertTrue(copy.exists())
        self.assertEqual(copy.entries_count(), 1)
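
The test assets themselves are not reproduced on this page. Assuming ODM's standard gcp_list.txt layout (a projection header on the first line, then one "geo_x geo_y geo_z im_x im_y image_name [extras]" row per point), a file like tests/assets/gcp_latlon_valid.txt would look roughly like the sketch below. The header and all values here are illustrative placeholders, not the actual asset contents:

    EPSG:4326
    -85.6 44.7 171.5 1100 900 DJI_0001.JPG
    -85.6 44.7 171.5 1520 880 DJI_0002.JPG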
Example #2
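An ODM pipeline method that georeferences a reconstruction using a GCP file. It writes a coords file whose header is the GCP's WGS84 UTM zone, reprojects the GCP file to UTM via create_utm_copy() (logging a warning for entries whose images are not found), and raises if the projection fails or no valid entries remain.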
    def georeference_with_gcp(self,
                              gcp_file,
                              output_coords_file,
                              output_gcp_file,
                              rerun=False):
        if not io.file_exists(output_coords_file) or not io.file_exists(
                output_gcp_file) or rerun:
            gcp = GCPFile(gcp_file)
            if gcp.exists():
                # Create coords file, we'll be using this later
                # during georeferencing
                with open(output_coords_file, 'w') as f:
                    coords_header = gcp.wgs84_utm_zone()
                    f.write(coords_header + "\n")
                    log.ODM_DEBUG("Generated coords file from GCP: %s" %
                                  coords_header)

                # Convert GCP file to a UTM projection since the rest of the pipeline
                # does not handle other SRS well.
                rejected_entries = []
                utm_gcp = GCPFile(
                    gcp.create_utm_copy(
                        output_gcp_file,
                        filenames=[p.filename for p in self.photos],
                        rejected_entries=rejected_entries,
                        include_extras=False))

                if not utm_gcp.exists():
                    raise RuntimeError(
                        "Could not project GCP file to UTM. Please double check your GCP file for mistakes."
                    )

                for re in rejected_entries:
                    log.ODM_WARNING("GCP line ignored (image not found): %s" %
                                    str(re))

                if utm_gcp.entries_count() > 0:
                    log.ODM_INFO(
                        "%s GCP points will be used for georeferencing" %
                        utm_gcp.entries_count())
                else:
                    raise RuntimeError(
                        "A GCP file was provided, but no valid GCP entries could be used. Note that the GCP file is case sensitive (\".JPG\" is not the same as \".jpg\")."
                    )

                self.gcp = utm_gcp
            else:
                log.ODM_WARNING("GCP file does not exist: %s" % gcp_file)
                return
        else:
            log.ODM_INFO("Coordinates file already exist: %s" %
                         output_coords_file)
            log.ODM_INFO("GCP file already exist: %s" % output_gcp_file)
            self.gcp = GCPFile(output_gcp_file)

        self.georef = ODM_GeoRef.FromCoordsFile(output_coords_file)
        return self.georef
Example #3
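A later revision of the same method. In addition to the checks above, it rejects GCP files with zero entries up front, derives integer x/y offsets from the mean of the UTM GCP coordinates and writes them as a second line of the coords file, and copies that file to output_model_txt_geo for backward compatibility.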
    def georeference_with_gcp(self, gcp_file, output_coords_file, output_gcp_file, output_model_txt_geo, rerun=False):
        if not io.file_exists(output_coords_file) or not io.file_exists(output_gcp_file) or rerun:
            gcp = GCPFile(gcp_file)
            if gcp.exists():
                if gcp.entries_count() == 0:
                    raise RuntimeError("This GCP file does not have any entries. Are the entries entered in the proper format?")

                # Convert GCP file to a UTM projection since the rest of the pipeline
                # does not handle other SRS well.
                rejected_entries = []
                utm_gcp = GCPFile(gcp.create_utm_copy(output_gcp_file, filenames=[p.filename for p in self.photos], rejected_entries=rejected_entries, include_extras=False))
                
                if not utm_gcp.exists():
                    raise RuntimeError("Could not project GCP file to UTM. Please double check your GCP file for mistakes.")
                
                for re in rejected_entries:
                    log.ODM_WARNING("GCP line ignored (image not found): %s" % str(re))
                
                if utm_gcp.entries_count() > 0:
                    log.ODM_INFO("%s GCP points will be used for georeferencing" % utm_gcp.entries_count())
                else:
                    raise RuntimeError("A GCP file was provided, but no valid GCP entries could be used. Note that the GCP file is case sensitive (\".JPG\" is not the same as \".jpg\").")
                
                self.gcp = utm_gcp

                # Compute RTC offsets from GCP points
                x_pos = [p.x for p in utm_gcp.iter_entries()]
                y_pos = [p.y for p in utm_gcp.iter_entries()]
                x_off, y_off = int(np.round(np.mean(x_pos))), int(np.round(np.mean(y_pos)))

                # Create coords file, we'll be using this later
                # during georeferencing
                with open(output_coords_file, 'w') as f:
                    coords_header = gcp.wgs84_utm_zone()
                    f.write(coords_header + "\n")
                    f.write("{} {}\n".format(x_off, y_off))
                    log.ODM_INFO("Generated coords file from GCP: %s" % coords_header)
                
                # Deprecated: This is mostly for backward compatibility and should
                # be removed at some point
                shutil.copyfile(output_coords_file, output_model_txt_geo)
                log.ODM_INFO("Wrote %s" % output_model_txt_geo)
            else:
                log.ODM_WARNING("GCP file does not exist: %s" % gcp_file)
                return
        else:
            log.ODM_INFO("Coordinates file already exist: %s" % output_coords_file)
            log.ODM_INFO("GCP file already exist: %s" % output_gcp_file)
            self.gcp = GCPFile(output_gcp_file)
        
        self.georef = ODM_GeoRef.FromCoordsFile(output_coords_file)
        return self.georef
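
As a quick sanity check of the offset computation above, here is a minimal, self-contained sketch. The two coordinate pairs are the UTM values from examples #5 and #6, standing in for real GCP entries (they are test fixtures, not survey data):

    import numpy as np

    # Eastings and northings of two sample GCP entries (placeholder values)
    x_pos = [609865.707705, 609925.818]
    y_pos = [4950688.36182, 4950688.772]

    # Same computation as georeference_with_gcp() above:
    # the offsets are the rounded means of the coordinates
    x_off, y_off = int(np.round(np.mean(x_pos))), int(np.round(np.mean(y_pos)))
    print(x_off, y_off)  # 609896 4950689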
Example #4
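Verifies that the optional extras column of a GCP entry is parsed, and that create_utm_copy(..., include_extras=False) strips it from the copy.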
    def test_gcp_extras(self):
        gcp = GCPFile('tests/assets/gcp_extras.txt')
        self.assertEqual(gcp.get_entry(0).extras, 'gcp1')

        copy = GCPFile(gcp.create_utm_copy("tests/assets/output/gcp_utm_no_extras.txt", include_extras=False))
        self.assertTrue(copy.exists())
        self.assertEqual(copy.get_entry(0).extras, '')
Example #5
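Checks that a lat/lon GCP file is reprojected to WGS84 UTM zone 16N, with the expected easting and northing for the first entry.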
    def test_utm_conversion(self):
        gcp = GCPFile("tests/assets/gcp_latlon_valid.txt")
        copy = GCPFile(gcp.create_utm_copy("tests/assets/output/gcp_utm.txt"))
        self.assertTrue(copy.exists())
        self.assertEqual(copy.raw_srs, "WGS84 UTM 16N")
        self.assertEqual(copy.get_entry(0).x, 609865.707705)
        self.assertEqual(copy.get_entry(0).y, 4950688.36182)
Example #6
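The same conversion for a Michigan file with elevations in feet; x, y and z are compared after rounding to three decimal places.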
    def test_utm_conversion_feet(self):
        gcp = GCPFile("tests/assets/gcp_michigan_feet_valid.txt")
        copy = GCPFile(gcp.create_utm_copy("tests/assets/output/gcp_utm_z.txt"))
        self.assertTrue(copy.exists())
        self.assertEqual(copy.raw_srs, "WGS84 UTM 16N")
        self.assertEqual(round(copy.get_entry(0).x, 3), 609925.818)
        self.assertEqual(round(copy.get_entry(0).y, 3), 4950688.772)
        self.assertEqual(round(copy.get_entry(0).z, 3), 171.663)
Example #7
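A GCPFile constructed from None simply reports that it does not exist.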
    def test_null_gcp(self):
        gcp = GCPFile(None)
        self.assertFalse(gcp.exists())
Example #8
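wgs84_utm_zone() infers the UTM zone string from the file's coordinates; this northern-hemisphere asset maps to zone 16N.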
    def test_latlon(self):
        gcp = GCPFile("tests/assets/gcp_latlon_valid.txt")
        self.assertTrue(gcp.exists())
        self.assertEqual(gcp.wgs84_utm_zone(), "WGS84 UTM 16N")
Example #9
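The southern-hemisphere counterpart: the inferred zone carries the S suffix.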
    def test_latlon_south(self):
        gcp = GCPFile("tests/assets/gcp_latlon_south.txt")
        self.assertTrue(gcp.exists())
        self.assertEqual(gcp.wgs84_utm_zone(), "WGS84 UTM 48S")
Example #10
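A split-merge pipeline stage. When a dataset exceeds args.split photos, it configures OpenSfM submodels, copies a filtered GCP file into each submodel (as in example #1), reconstructs the submodels either locally or through a LocalRemoteExecutor, aligns the results, promotes each reconstruction.aligned.json to reconstruction.json, and re-runs the ODM toolchain for every valid submodel.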
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']
        photos = reconstruction.photos

        outputs['large'] = len(photos) > args.split

        if outputs['large']:
            # If we have a cluster address, we'll use a distributed workflow
            local_workflow = not bool(args.sm_cluster)

            octx = OSFMContext(tree.opensfm)
            split_done_file = octx.path("split_done.txt")

            if not io.file_exists(split_done_file) or self.rerun():
                orig_max_concurrency = args.max_concurrency
                if not local_workflow:
                    args.max_concurrency = max(1, args.max_concurrency - 1)
                    log.ODM_INFO(
                        "Setting max-concurrency to %s to better handle remote splits"
                        % args.max_concurrency)

                log.ODM_INFO(
                    "Large dataset detected (%s photos) and split set at %s. Preparing split merge."
                    % (len(photos), args.split))
                config = [
                    "submodels_relpath: ../submodels/opensfm",
                    "submodel_relpath_template: ../submodels/submodel_%04d/opensfm",
                    "submodel_images_relpath_template: ../submodels/submodel_%04d/images",
                    "submodel_size: %s" % args.split,
                    "submodel_overlap: %s" % args.split_overlap,
                ]

                octx.setup(args,
                           tree.dataset_raw,
                           photos,
                           gcp_path=tree.odm_georeferencing_gcp,
                           append_config=config,
                           rerun=self.rerun())
                octx.extract_metadata(self.rerun())

                self.update_progress(5)

                if local_workflow:
                    octx.feature_matching(self.rerun())

                self.update_progress(20)

                # Create submodels
                if not io.dir_exists(tree.submodels_path) or self.rerun():
                    if io.dir_exists(tree.submodels_path):
                        log.ODM_WARNING(
                            "Removing existing submodels directory: %s" %
                            tree.submodels_path)
                        shutil.rmtree(tree.submodels_path)

                    octx.run("create_submodels")
                else:
                    log.ODM_WARNING(
                        "Submodels directory already exists at: %s" %
                        tree.submodels_path)

                # Find paths of all submodels
                mds = metadataset.MetaDataSet(tree.opensfm)
                submodel_paths = [
                    os.path.abspath(p) for p in mds.get_submodel_paths()
                ]

                gcp_file = GCPFile(tree.odm_georeferencing_gcp)

                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    # Copy filtered GCP file if needed
                    # One in OpenSfM's directory, one in the submodel project directory
                    if gcp_file.exists():
                        submodel_gcp_file = os.path.abspath(
                            sp_octx.path("..", "gcp_list.txt"))
                        submodel_images_dir = os.path.abspath(
                            sp_octx.path("..", "images"))

                        if gcp_file.make_filtered_copy(submodel_gcp_file,
                                                       submodel_images_dir):
                            log.ODM_DEBUG("Copied filtered GCP file to %s" %
                                          submodel_gcp_file)
                            io.copy(
                                submodel_gcp_file,
                                os.path.abspath(sp_octx.path("gcp_list.txt")))
                        else:
                            log.ODM_DEBUG(
                                "No GCP will be copied for %s, not enough images in the submodel are referenced by the GCP"
                                % sp_octx.name())

                # Reconstruct each submodel
                log.ODM_INFO(
                    "Dataset has been split into %s submodels. Reconstructing each submodel..."
                    % len(submodel_paths))
                self.update_progress(25)

                if local_workflow:
                    for sp in submodel_paths:
                        log.ODM_INFO("Reconstructing %s" % sp)
                        OSFMContext(sp).reconstruct(self.rerun())
                else:
                    lre = LocalRemoteExecutor(args.sm_cluster)
                    lre.set_projects([
                        os.path.abspath(os.path.join(p, ".."))
                        for p in submodel_paths
                    ])
                    lre.run_reconstruction()

                self.update_progress(50)

                # Align
                octx.align_reconstructions(self.rerun())

                self.update_progress(55)

                # Aligned reconstruction is in reconstruction.aligned.json
                # We need to rename it to reconstruction.json
                remove_paths = []
                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    aligned_recon = sp_octx.path('reconstruction.aligned.json')
                    main_recon = sp_octx.path('reconstruction.json')

                    if not io.file_exists(aligned_recon):
                        log.ODM_WARNING(
                            "Submodel %s does not have an aligned reconstruction (%s). "
                            "This could mean that the submodel could not be reconstructed "
                            "(are there enough features to reconstruct it?). Skipping."
                            % (sp_octx.name(), aligned_recon))
                        remove_paths.append(sp)
                        continue

                    if io.file_exists(main_recon):
                        os.remove(main_recon)

                    shutil.move(aligned_recon, main_recon)
                    log.ODM_DEBUG("%s is now %s" % (aligned_recon, main_recon))

                # Remove invalid submodels
                submodel_paths = [
                    p for p in submodel_paths if p not in remove_paths
                ]

                # Run ODM toolchain for each submodel
                if local_workflow:
                    for sp in submodel_paths:
                        sp_octx = OSFMContext(sp)

                        log.ODM_INFO("========================")
                        log.ODM_INFO("Processing %s" % sp_octx.name())
                        log.ODM_INFO("========================")

                        argv = get_submodel_argv(args.name,
                                                 tree.submodels_path,
                                                 sp_octx.name())

                        # Re-run the ODM toolchain on the submodel
                        system.run(" ".join(map(quote, argv)),
                                   env_vars=os.environ.copy())
                else:
                    lre.set_projects([
                        os.path.abspath(os.path.join(p, ".."))
                        for p in submodel_paths
                    ])
                    lre.run_toolchain()

                # Restore max_concurrency value
                args.max_concurrency = orig_max_concurrency

                with open(split_done_file, 'w') as fout:
                    fout.write("Split done!\n")
            else:
                log.ODM_WARNING('Found a split done file in: %s' %
                                split_done_file)
        else:
            log.ODM_INFO("Normal dataset, will process all at once.")
            self.progress = 0.0