Code example #1
def run_dataset(data):
    """ Align submodel reconstructions for of MetaDataSet. """

    meta_data = metadataset.MetaDataSet(data.data_path)
    reconstruction_shots = tools.load_reconstruction_shots(meta_data)
    transformations = tools.align_reconstructions(
        reconstruction_shots, tools.partial_reconstruction_name, True)
    tools.apply_transformations(transformations)
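
These snippets assume OpenSfM's large-scale reconstruction helpers are already in scope. A minimal sketch of the imports this example relies on, plus a direct invocation with a placeholder dataset path:

    # Assumed imports, matching OpenSfM's package layout:
    from opensfm import dataset
    from opensfm.large import metadataset
    from opensfm.large import tools

    data = dataset.DataSet("path/to/dataset")  # placeholder path
    run_dataset(data)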
Code example #2
 def run(self, args):
     meta_data = metadataset.MetaDataSet(args.dataset)
     reconstruction_shots = tools.load_reconstruction_shots(meta_data)
     transformations = \
         tools.align_reconstructions(reconstruction_shots,
                                     tools.partial_reconstruction_name,
                                     True)
     tools.apply_transformations(transformations)
Code example #3
File: align_submodels.py  Project: coderzbx/OpenSfM
 def run(self, args):
     meta_data = metadataset.MetaDataSet(args.dataset)
     reconstruction_shots = tools.load_reconstruction_shots(meta_data)
     transformations = \
         tools.align_reconstructions(reconstruction_shots,
                                     tools.partial_reconstruction_name,
                                     True)
     # tools.align_reconstructions(reconstruction_shots,
     #                             tools.partial_reconstruction_name,
     #                             True,
     #                             camera_constraint_type='hard_camera_constraint')
     tools.apply_transformations(transformations)
Code example #4
    def align_reconstructions(self, rerun):
        alignment_file = self.path('alignment_done.txt')
        if not io.file_exists(alignment_file) or rerun:
            log.ODM_INFO("Aligning submodels...")
            meta_data = metadataset.MetaDataSet(self.opensfm_project_path)
            reconstruction_shots = tools.load_reconstruction_shots(meta_data)
            transformations = tools.align_reconstructions(reconstruction_shots, use_points_constraints=False)
            tools.apply_transformations(transformations)

            with open(alignment_file, 'w') as fout:
                fout.write("Alignment done!\n")
        else:
            log.ODM_WARNING('Found an alignment done progress file in: %s' % alignment_file)
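
The alignment_done.txt check above is ODM's done-file guard: a step writes a marker file on success and skips itself on later runs unless a rerun is requested. A minimal standalone sketch of the same pattern, with illustrative names:

    import os

    def run_once(marker_path, task, rerun=False):
        # Skip the task when the marker exists and no rerun was requested.
        if not os.path.exists(marker_path) or rerun:
            task()
            with open(marker_path, 'w') as fout:
                fout.write("done\n")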
Code example #5
    def run(self, args):
        data = dataset.DataSet(args.dataset)
        meta_data = metadataset.MetaDataSet(args.dataset)

        meta_data.remove_submodels()
        data.invent_reference_lla()

        self._create_image_list(data, meta_data)
        self._cluster_images(meta_data, data.config['submodel_size'])
        self._add_cluster_neighbors(meta_data, data.config['submodel_overlap'])

        meta_data.create_submodels(meta_data.load_clusters_with_neighbors(),
                                   not data.config['submodel_use_symlinks'])
Code example #6
    def run(self, args):
        data = dataset.DataSet(args.dataset)
        meta_data = metadataset.MetaDataSet(args.dataset)

        meta_data.remove_submodels()
        data.invent_reference_lla()

        self._create_image_list(data, meta_data)
        self._cluster_images(meta_data, args.size)
        self._add_cluster_neighbors(meta_data, args.dist)

        meta_data.create_submodels(
            meta_data.load_clusters_with_neighbors(), args.no_symlinks)
Code example #7
    def align_reconstructions(self, rerun):
        alignment_file = self.path('alignment_done.txt')
        if not io.file_exists(alignment_file) or rerun:
            log.ODM_INFO("Aligning submodels...")
            meta_data = metadataset.MetaDataSet(self.opensfm_project_path)
            reconstruction_shots = tools.load_reconstruction_shots(meta_data)
            transformations = tools.align_reconstructions(
                reconstruction_shots, tools.partial_reconstruction_name, False)
            tools.apply_transformations(transformations)

            self.touch(alignment_file)
        else:
            log.ODM_WARNING('Found an alignment done progress file in: %s' %
                            alignment_file)
Code example #8
    def run(self, args):
        data = dataset.DataSet(args.dataset)
        meta_data = metadataset.MetaDataSet(args.dataset)

        meta_data.remove_submodels()
        data.invent_reference_lla()
        self._create_image_list(data, meta_data)

        if meta_data.image_groups_exists():
            self._read_image_groups(meta_data)
        else:
            self._cluster_images(meta_data, data.config['submodel_size'])

        self._add_cluster_neighbors(meta_data, data.config['submodel_overlap'])
        self._save_clusters_geojson(meta_data)
        self._save_cluster_neighbors_geojson(meta_data)

        meta_data.create_submodels(meta_data.load_clusters_with_neighbors())
Code example #9
def run_dataset(data):
    """ Split the dataset into smaller submodels. """

    meta_data = metadataset.MetaDataSet(data.data_path)

    meta_data.remove_submodels()
    data.invent_reference_lla()
    _create_image_list(data, meta_data)

    if meta_data.image_groups_exists():
        _read_image_groups(meta_data)
    else:
        _cluster_images(meta_data, data.config["submodel_size"])

    _add_cluster_neighbors(meta_data, data.config["submodel_overlap"])
    _save_clusters_geojson(meta_data)
    _save_cluster_neighbors_geojson(meta_data)

    meta_data.create_submodels(meta_data.load_clusters_with_neighbors())
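
_cluster_images partitions the images into groups of roughly submodel_size by camera position. A rough sketch of that kind of position-based clustering with scipy's k-means (illustrative only, not OpenSfM's exact implementation):

    import numpy as np
    from scipy.cluster.vq import kmeans, vq

    def cluster_positions(positions, cluster_size):
        # positions: (N, 2) array of per-image planar coordinates.
        positions = np.asarray(positions, dtype=float)
        k = max(1, int(np.ceil(len(positions) / float(cluster_size))))
        centroids, _ = kmeans(positions, k)
        labels, _ = vq(positions, centroids)
        return labels  # cluster index for each image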
Code example #10
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']
        photos = reconstruction.photos

        outputs['large'] = len(photos) > args.split

        if outputs['large']:
            # If we have a cluster address, we'll use a distributed workflow
            local_workflow = not bool(args.sm_cluster)

            octx = OSFMContext(tree.opensfm)
            split_done_file = octx.path("split_done.txt")

            if not io.file_exists(split_done_file) or self.rerun():
                orig_max_concurrency = args.max_concurrency
                if not local_workflow:
                    args.max_concurrency = max(1, args.max_concurrency - 1)
                    log.ODM_INFO("Setting max-concurrency to %s to better handle remote splits" % args.max_concurrency)

                log.ODM_INFO("Large dataset detected (%s photos) and split set at %s. Preparing split merge." % (
                    len(photos), args.split))
                config = [
                    "submodels_relpath: ../submodels/opensfm",
                    "submodel_relpath_template: ../submodels/submodel_%04d/opensfm",
                    "submodel_images_relpath_template: ../submodels/submodel_%04d/images",
                    "submodel_size: %s" % args.split,
                    "submodel_overlap: %s" % args.split_overlap,
                ]

                octx.setup(args, tree.dataset_raw, reconstruction=reconstruction, append_config=config,
                           rerun=self.rerun())
                octx.extract_metadata(self.rerun())

                self.update_progress(5)

                if local_workflow:
                    octx.feature_matching(self.rerun())

                self.update_progress(20)

                # Create submodels
                if not io.dir_exists(tree.submodels_path) or self.rerun():
                    if io.dir_exists(tree.submodels_path):
                        log.ODM_WARNING("Removing existing submodels directory: %s" % tree.submodels_path)
                        shutil.rmtree(tree.submodels_path)

                    octx.run("create_submodels")
                else:
                    log.ODM_WARNING("Submodels directory already exist at: %s" % tree.submodels_path)

                # Find paths of all submodels
                mds = metadataset.MetaDataSet(tree.opensfm)
                submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]

                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    # Copy filtered GCP file if needed
                    # One in OpenSfM's directory, one in the submodel project directory
                    if reconstruction.gcp and reconstruction.gcp.exists():
                        submodel_gcp_file = os.path.abspath(sp_octx.path("..", "gcp_list.txt"))
                        submodel_images_dir = os.path.abspath(sp_octx.path("..", "images"))

                        if reconstruction.gcp.make_filtered_copy(submodel_gcp_file, submodel_images_dir):
                            log.ODM_INFO("Copied filtered GCP file to %s" % submodel_gcp_file)
                            io.copy(submodel_gcp_file, os.path.abspath(sp_octx.path("gcp_list.txt")))
                        else:
                            log.ODM_INFO(
                                "No GCP will be copied for %s, not enough images in the submodel are referenced by the GCP" % sp_octx.name())

                # Reconstruct each submodel
                log.ODM_INFO(
                    "Dataset has been split into %s submodels. Reconstructing each submodel..." % len(submodel_paths))
                self.update_progress(25)

                if local_workflow:
                    for sp in submodel_paths:
                        log.ODM_INFO("Reconstructing %s" % sp)
                        OSFMContext(sp).reconstruct(self.rerun())
                else:
                    lre = LocalRemoteExecutor(args.sm_cluster, self.rerun())
                    lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                    lre.run_reconstruction()

                self.update_progress(50)

                # TODO: this is currently not working and needs a champion to fix it
                # https://community.opendronemap.org/t/filenotfound-error-cameras-json/6047/2

                # resplit_done_file = octx.path('resplit_done.txt')
                # if not io.file_exists(resplit_done_file) and bool(args.split_multitracks):
                #     submodels = mds.get_submodel_paths()
                #     i = 0
                #     for s in submodels:
                #         template = octx.path("../aligned_submodels/submodel_%04d")
                #         with open(s+"/reconstruction.json", "r") as f:
                #             j = json.load(f)
                #         for k in range(0, len(j)):
                #             v = j[k]
                #             path = template % i

                #             #Create the submodel path up to opensfm
                #             os.makedirs(path+"/opensfm")
                #             os.makedirs(path+"/images")

                #             #symlinks for common data
                #             images = os.listdir(octx.path("../images"))
                #             for image in images:
                #                 os.symlink("../../../images/"+image, path+"/images/"+image)
                #             os.symlink("../../../opensfm/exif", path+"/opensfm/exif")
                #             os.symlink("../../../opensfm/features", path+"/opensfm/features")
                #             os.symlink("../../../opensfm/matches", path+"/opensfm/matches")
                #             os.symlink("../../../opensfm/reference_lla.json", path+"/opensfm/reference_lla.json")
                #             os.symlink("../../../opensfm/camera_models.json", path+"/opensfm/camera_models.json")

                #             shutil.copy(s+"/../cameras.json", path+"/cameras.json")

                #             shutil.copy(s+"/../images.json", path+"/images.json")

                #             with open(octx.path("config.yaml")) as f:
                #                 doc = yaml.safe_load(f)

                #             dmcv = "depthmap_min_consistent_views"
                #             if dmcv in doc:
                #                 if len(v["shots"]) < doc[dmcv]:
                #                     doc[dmcv] = len(v["shots"])
                #                     print("WARNING: Reduced "+dmcv+" to accommodate short track")

                #             with open(path+"/opensfm/config.yaml", "w") as f:
                #                 yaml.dump(doc, f)

                #             #We need the original tracks file for the visualsfm export, since
                #             #there may still be point matches between the tracks
                #             shutil.copy(s+"/tracks.csv", path+"/opensfm/tracks.csv")

                #             #Create our new reconstruction file with only the relevant track
                #             with open(path+"/opensfm/reconstruction.json", "w") as o:
                #                 json.dump([v], o)

                #             #Create image lists
                #             with open(path+"/opensfm/image_list.txt", "w") as o:
                #                 o.writelines(list(map(lambda x: "../images/"+x+'\n', v["shots"].keys())))
                #             with open(path+"/img_list.txt", "w") as o:
                #                 o.writelines(list(map(lambda x: x+'\n', v["shots"].keys())))

                #             i+=1
                #     os.rename(octx.path("../submodels"), octx.path("../unaligned_submodels"))
                #     os.rename(octx.path("../aligned_submodels"), octx.path("../submodels"))
                #     octx.touch(resplit_done_file)

                mds = metadataset.MetaDataSet(tree.opensfm)
                submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]

                # Align
                octx.align_reconstructions(self.rerun())

                self.update_progress(55)

                # Aligned reconstruction is in reconstruction.aligned.json
                # We need to rename it to reconstruction.json
                remove_paths = []
                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    aligned_recon = sp_octx.path('reconstruction.aligned.json')
                    unaligned_recon = sp_octx.path('reconstruction.unaligned.json')
                    main_recon = sp_octx.path('reconstruction.json')

                    if io.file_exists(main_recon) and io.file_exists(unaligned_recon) and not self.rerun():
                        log.ODM_INFO("Submodel %s has already been aligned." % sp_octx.name())
                        continue

                    if not io.file_exists(aligned_recon):
                        log.ODM_WARNING("Submodel %s does not have an aligned reconstruction (%s). "
                                        "This could mean that the submodel could not be reconstructed "
                                        " (are there enough features to reconstruct it?). Skipping." % (
                                            sp_octx.name(), aligned_recon))
                        remove_paths.append(sp)
                        continue

                    if io.file_exists(main_recon):
                        shutil.move(main_recon, unaligned_recon)

                    shutil.move(aligned_recon, main_recon)
                    log.ODM_INFO("%s is now %s" % (aligned_recon, main_recon))

                # Remove invalid submodels
                submodel_paths = [p for p in submodel_paths if p not in remove_paths]

                # Run ODM toolchain for each submodel
                if local_workflow:
                    for sp in submodel_paths:
                        sp_octx = OSFMContext(sp)

                        log.ODM_INFO("========================")
                        log.ODM_INFO("Processing %s" % sp_octx.name())
                        log.ODM_INFO("========================")

                        argv = get_submodel_argv(args, tree.submodels_path, sp_octx.name())

                        # Re-run the ODM toolchain on the submodel
                        system.run(" ".join(map(quote, map(str, argv))), env_vars=os.environ.copy())
                else:
                    lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                    lre.run_toolchain()

                # Restore max_concurrency value
                args.max_concurrency = orig_max_concurrency

                octx.touch(split_done_file)
            else:
                log.ODM_WARNING('Found a split done file in: %s' % split_done_file)
        else:
            log.ODM_INFO("Normal dataset, will process all at once.")
            self.progress = 0.0
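
In the snippet above, get_submodel_argv assembles the argv for re-running ODM on one submodel, and the join-with-quote call turns it into a shell-safe command string. A tiny illustration of that quoting step, with made-up values:

    from shlex import quote

    argv = ["python", "run.py", "--max-concurrency", 4, "project name"]
    cmd = " ".join(map(quote, map(str, argv)))
    # cmd == "python run.py --max-concurrency 4 'project name'"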
Code example #11
File: run_reconstructions.py  Project: ywyue/ODM
    def _set_matching_done(self, submodel_path):
        """Tell ODM's opensfm not to rerun matching."""
        matching_done_file = os.path.join(submodel_path, 'matching_done.txt')
        with open(matching_done_file, 'w') as fout:
            fout.write("Matching done!\n")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Reconstruct all submodels')
    parser.add_argument('dataset', help='path to the dataset to be processed')
    parser.add_argument('--run-matching',
                        help='Run matching for each submodel',
                        action='store_true')
    args = parser.parse_args()

    path = os.path.join(args.dataset, 'opensfm')
    meta_data = metadataset.MetaDataSet(path)
    command = os.path.join(context.opensfm_path, 'bin', 'opensfm')

    submodel_paths = meta_data.get_submodel_paths()
    reconstructor = Reconstructor(command, args.run_matching)

    processes = meta_data.config['processes']
    if processes == 1:
        for submodel_path in submodel_paths:
            reconstructor(submodel_path)
    else:
        p = multiprocessing.Pool(processes)
        p.map(reconstructor, submodel_paths)
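
Reconstructor itself is not shown in this snippet. multiprocessing.Pool.map needs a picklable callable, which is why it is a class instance rather than a closure; a minimal sketch of what such a callable might look like (the CLI invocation is an assumption):

    import subprocess

    class Reconstructor:
        def __init__(self, command, run_matching):
            self.command = command
            self.run_matching = run_matching

        def __call__(self, submodel_path):
            # Illustrative: optionally match features, then reconstruct
            # one submodel through the opensfm command-line tool.
            if self.run_matching:
                subprocess.check_call([self.command, 'match_features', submodel_path])
            subprocess.check_call([self.command, 'reconstruct', submodel_path])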
Code example #12
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']
        photos = reconstruction.photos

        outputs['large'] = len(photos) > args.split

        if outputs['large']:
            # If we have a cluster address, we'll use a distributed workflow
            local_workflow = not bool(args.sm_cluster)

            octx = OSFMContext(tree.opensfm)
            split_done_file = octx.path("split_done.txt")

            if not io.file_exists(split_done_file) or self.rerun():
                orig_max_concurrency = args.max_concurrency
                if not local_workflow:
                    args.max_concurrency = max(1, args.max_concurrency - 1)
                    log.ODM_INFO(
                        "Setting max-concurrency to %s to better handle remote splits"
                        % args.max_concurrency)

                log.ODM_INFO(
                    "Large dataset detected (%s photos) and split set at %s. Preparing split merge."
                    % (len(photos), args.split))
                config = [
                    "submodels_relpath: ../submodels/opensfm",
                    "submodel_relpath_template: ../submodels/submodel_%04d/opensfm",
                    "submodel_images_relpath_template: ../submodels/submodel_%04d/images",
                    "submodel_size: %s" % args.split,
                    "submodel_overlap: %s" % args.split_overlap,
                ]

                octx.setup(args,
                           tree.dataset_raw,
                           photos,
                           reconstruction=reconstruction,
                           append_config=config,
                           rerun=self.rerun())
                octx.extract_metadata(self.rerun())

                self.update_progress(5)

                if local_workflow:
                    octx.feature_matching(self.rerun())

                self.update_progress(20)

                # Create submodels
                if not io.dir_exists(tree.submodels_path) or self.rerun():
                    if io.dir_exists(tree.submodels_path):
                        log.ODM_WARNING(
                            "Removing existing submodels directory: %s" %
                            tree.submodels_path)
                        shutil.rmtree(tree.submodels_path)

                    octx.run("create_submodels")
                else:
                    log.ODM_WARNING(
                        "Submodels directory already exist at: %s" %
                        tree.submodels_path)

                # Find paths of all submodels
                mds = metadataset.MetaDataSet(tree.opensfm)
                submodel_paths = [
                    os.path.abspath(p) for p in mds.get_submodel_paths()
                ]

                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    # Copy filtered GCP file if needed
                    # One in OpenSfM's directory, one in the submodel project directory
                    if reconstruction.gcp and reconstruction.gcp.exists():
                        submodel_gcp_file = os.path.abspath(
                            sp_octx.path("..", "gcp_list.txt"))
                        submodel_images_dir = os.path.abspath(
                            sp_octx.path("..", "images"))

                        if reconstruction.gcp.make_filtered_copy(
                                submodel_gcp_file, submodel_images_dir):
                            log.ODM_INFO("Copied filtered GCP file to %s" %
                                         submodel_gcp_file)
                            io.copy(
                                submodel_gcp_file,
                                os.path.abspath(sp_octx.path("gcp_list.txt")))
                        else:
                            log.ODM_INFO(
                                "No GCP will be copied for %s, not enough images in the submodel are referenced by the GCP"
                                % sp_octx.name())

                # Reconstruct each submodel
                log.ODM_INFO(
                    "Dataset has been split into %s submodels. Reconstructing each submodel..."
                    % len(submodel_paths))
                self.update_progress(25)

                if local_workflow:
                    for sp in submodel_paths:
                        log.ODM_INFO("Reconstructing %s" % sp)
                        OSFMContext(sp).reconstruct(self.rerun())
                else:
                    lre = LocalRemoteExecutor(args.sm_cluster, self.rerun())
                    lre.set_projects([
                        os.path.abspath(os.path.join(p, ".."))
                        for p in submodel_paths
                    ])
                    lre.run_reconstruction()

                self.update_progress(50)

                # Align
                octx.align_reconstructions(self.rerun())

                self.update_progress(55)

                # Aligned reconstruction is in reconstruction.aligned.json
                # We need to rename it to reconstruction.json
                remove_paths = []
                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    aligned_recon = sp_octx.path('reconstruction.aligned.json')
                    unaligned_recon = sp_octx.path(
                        'reconstruction.unaligned.json')
                    main_recon = sp_octx.path('reconstruction.json')

                    if io.file_exists(main_recon) and io.file_exists(
                            unaligned_recon) and not self.rerun():
                        log.ODM_INFO("Submodel %s has already been aligned." %
                                     sp_octx.name())
                        continue

                    if not io.file_exists(aligned_recon):
                        log.ODM_WARNING(
                            "Submodel %s does not have an aligned reconstruction (%s). "
                            "This could mean that the submodel could not be reconstructed "
                            " (are there enough features to reconstruct it?). Skipping."
                            % (sp_octx.name(), aligned_recon))
                        remove_paths.append(sp)
                        continue

                    if io.file_exists(main_recon):
                        shutil.move(main_recon, unaligned_recon)

                    shutil.move(aligned_recon, main_recon)
                    log.ODM_INFO("%s is now %s" % (aligned_recon, main_recon))

                # Remove invalid submodels
                submodel_paths = [
                    p for p in submodel_paths if p not in remove_paths
                ]

                # Run ODM toolchain for each submodel
                if local_workflow:
                    for sp in submodel_paths:
                        sp_octx = OSFMContext(sp)

                        log.ODM_INFO("========================")
                        log.ODM_INFO("Processing %s" % sp_octx.name())
                        log.ODM_INFO("========================")

                        argv = get_submodel_argv(args, tree.submodels_path,
                                                 sp_octx.name())

                        # Re-run the ODM toolchain on the submodel
                        system.run(" ".join(map(quote, argv)),
                                   env_vars=os.environ.copy())
                else:
                    lre.set_projects([
                        os.path.abspath(os.path.join(p, ".."))
                        for p in submodel_paths
                    ])
                    lre.run_toolchain()

                # Restore max_concurrency value
                args.max_concurrency = orig_max_concurrency

                octx.touch(split_done_file)
            else:
                log.ODM_WARNING('Found a split done file in: %s' %
                                split_done_file)
        else:
            log.ODM_INFO("Normal dataset, will process all at once.")
            self.progress = 0.0
Code example #13
            raise RuntimeError(result)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Reconstruct all submodels')
    parser.add_argument('dataset', help='path to the dataset to be processed')
    parser.add_argument('-c',
                        '--complete',
                        help='Run the complete pipeline on each subset',
                        action='store_true')
    parser.add_argument('-p',
                        '--processes',
                        help='Number of parallel processes to run',
                        type=int,
                        default=1)
    args = parser.parse_args()

    meta_data = metadataset.MetaDataSet(args.dataset)
    exec_dir = os.path.join(os.getcwd(), os.path.dirname(sys.argv[0]))
    command = os.path.join(exec_dir, "bin/opensfm")

    submodel_paths = meta_data.get_submodel_paths()
    reconstructor = Reconstructor(command, args.complete)

    if args.processes == 1:
        for submodel_path in submodel_paths:
            reconstructor(submodel_path)
    else:
        p = multiprocessing.Pool(args.processes)
        p.map(reconstructor, submodel_paths)