def process(self, args, outputs):
    tree = outputs['tree']
    reconstruction = outputs['reconstruction']
    photos = reconstruction.photos

    outputs['large'] = len(photos) > args.split

    if outputs['large']:
        # If we have a cluster address, we'll use a distributed workflow
        local_workflow = not bool(args.sm_cluster)

        octx = OSFMContext(tree.opensfm)
        split_done_file = octx.path("split_done.txt")

        if not io.file_exists(split_done_file) or self.rerun():
            orig_max_concurrency = args.max_concurrency
            if not local_workflow:
                args.max_concurrency = max(1, args.max_concurrency - 1)
                log.ODM_INFO("Setting max-concurrency to %s to better handle remote splits" % args.max_concurrency)

            log.ODM_INFO("Large dataset detected (%s photos) and split set at %s. Preparing split merge." % (len(photos), args.split))
            config = [
                "submodels_relpath: ../submodels/opensfm",
                "submodel_relpath_template: ../submodels/submodel_%04d/opensfm",
                "submodel_images_relpath_template: ../submodels/submodel_%04d/images",
                "submodel_size: %s" % args.split,
                "submodel_overlap: %s" % args.split_overlap,
            ]

            octx.setup(args, tree.dataset_raw, reconstruction=reconstruction, append_config=config, rerun=self.rerun())
            octx.extract_metadata(self.rerun())

            self.update_progress(5)

            if local_workflow:
                octx.feature_matching(self.rerun())

            self.update_progress(20)

            # Create submodels
            if not io.dir_exists(tree.submodels_path) or self.rerun():
                if io.dir_exists(tree.submodels_path):
                    log.ODM_WARNING("Removing existing submodels directory: %s" % tree.submodels_path)
                    shutil.rmtree(tree.submodels_path)

                octx.run("create_submodels")
            else:
                log.ODM_WARNING("Submodels directory already exists at: %s" % tree.submodels_path)

            # Find paths of all submodels
            mds = metadataset.MetaDataSet(tree.opensfm)
            submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]

            for sp in submodel_paths:
                sp_octx = OSFMContext(sp)

                # Copy filtered GCP file if needed
                # One in OpenSfM's directory, one in the submodel project directory
                if reconstruction.gcp and reconstruction.gcp.exists():
                    submodel_gcp_file = os.path.abspath(sp_octx.path("..", "gcp_list.txt"))
                    submodel_images_dir = os.path.abspath(sp_octx.path("..", "images"))

                    if reconstruction.gcp.make_filtered_copy(submodel_gcp_file, submodel_images_dir):
                        log.ODM_INFO("Copied filtered GCP file to %s" % submodel_gcp_file)
                        io.copy(submodel_gcp_file, os.path.abspath(sp_octx.path("gcp_list.txt")))
                    else:
                        log.ODM_INFO("No GCP will be copied for %s, not enough images in the submodel are referenced by the GCP" % sp_octx.name())
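
            # At this point the layout under tree.submodels_path should look roughly
            # like the following (inferred from the *_relpath_template entries in the
            # config list above; names are illustrative):
            #
            #   submodels/
            #       opensfm/                  <- metadataset bookkeeping
            #       submodel_0000/
            #           images/               <- this submodel's photos
            #           gcp_list.txt          <- filtered GCPs (when available)
            #           opensfm/              <- per-submodel OpenSfM project
            #       submodel_0001/
            #           ...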

            # Reconstruct each submodel
            log.ODM_INFO("Dataset has been split into %s submodels. Reconstructing each submodel..." % len(submodel_paths))
            self.update_progress(25)

            if local_workflow:
                for sp in submodel_paths:
                    log.ODM_INFO("Reconstructing %s" % sp)
                    OSFMContext(sp).reconstruct(self.rerun())
            else:
                lre = LocalRemoteExecutor(args.sm_cluster, self.rerun())
                lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                lre.run_reconstruction()

            self.update_progress(50)

            # TODO: this is currently not working and needs a champion to fix it
            # https://community.opendronemap.org/t/filenotfound-error-cameras-json/6047/2
            # resplit_done_file = octx.path('resplit_done.txt')
            # if not io.file_exists(resplit_done_file) and bool(args.split_multitracks):
            #     submodels = mds.get_submodel_paths()
            #     i = 0
            #     for s in submodels:
            #         template = octx.path("../aligned_submodels/submodel_%04d")
            #         with open(s + "/reconstruction.json", "r") as f:
            #             j = json.load(f)
            #         for k in range(0, len(j)):
            #             v = j[k]
            #             path = template % i
            #
            #             # Create the submodel path up to opensfm
            #             os.makedirs(path + "/opensfm")
            #             os.makedirs(path + "/images")
            #
            #             # Symlinks for common data
            #             images = os.listdir(octx.path("../images"))
            #             for image in images:
            #                 os.symlink("../../../images/" + image, path + "/images/" + image)
            #             os.symlink("../../../opensfm/exif", path + "/opensfm/exif")
            #             os.symlink("../../../opensfm/features", path + "/opensfm/features")
            #             os.symlink("../../../opensfm/matches", path + "/opensfm/matches")
            #             os.symlink("../../../opensfm/reference_lla.json", path + "/opensfm/reference_lla.json")
            #             os.symlink("../../../opensfm/camera_models.json", path + "/opensfm/camera_models.json")
            #
            #             shutil.copy(s + "/../cameras.json", path + "/cameras.json")
            #             shutil.copy(s + "/../images.json", path + "/images.json")
            #
            #             with open(octx.path("config.yaml")) as f:
            #                 doc = yaml.safe_load(f)
            #
            #             dmcv = "depthmap_min_consistent_views"
            #             if dmcv in doc:
            #                 if len(v["shots"]) < doc[dmcv]:
            #                     doc[dmcv] = len(v["shots"])
            #                     print("WARNING: Reduced " + dmcv + " to accommodate short track")
            #
            #             with open(path + "/opensfm/config.yaml", "w") as f:
            #                 yaml.dump(doc, f)
            #
            #             # We need the original tracks file for the visualsfm export, since
            #             # there may still be point matches between the tracks
            #             shutil.copy(s + "/tracks.csv", path + "/opensfm/tracks.csv")
            #
            #             # Create our new reconstruction file with only the relevant track
            #             with open(path + "/opensfm/reconstruction.json", "w") as o:
            #                 json.dump([v], o)
            #
            #             # Create image lists
            #             with open(path + "/opensfm/image_list.txt", "w") as o:
            #                 o.writelines(list(map(lambda x: "../images/" + x + '\n', v["shots"].keys())))
            #             with open(path + "/img_list.txt", "w") as o:
            #                 o.writelines(list(map(lambda x: x + '\n', v["shots"].keys())))
            #
            #             i += 1
            #
            #     os.rename(octx.path("../submodels"), octx.path("../unaligned_submodels"))
            #     os.rename(octx.path("../aligned_submodels"), octx.path("../submodels"))
            #     octx.touch(resplit_done_file)

            mds = metadataset.MetaDataSet(tree.opensfm)
            submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]
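
            # Note: each submodel is reconstructed in its own local coordinate frame;
            # the alignment step below registers all submodels into a common frame so
            # the merged outputs line up. (Summary of intent only; the actual alignment
            # is implemented by OSFMContext/OpenSfM, not in this stage.)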

            # Align
            octx.align_reconstructions(self.rerun())

            self.update_progress(55)

            # Aligned reconstruction is in reconstruction.aligned.json
            # We need to rename it to reconstruction.json
            remove_paths = []
            for sp in submodel_paths:
                sp_octx = OSFMContext(sp)

                aligned_recon = sp_octx.path('reconstruction.aligned.json')
                unaligned_recon = sp_octx.path('reconstruction.unaligned.json')
                main_recon = sp_octx.path('reconstruction.json')

                if io.file_exists(main_recon) and io.file_exists(unaligned_recon) and not self.rerun():
                    log.ODM_INFO("Submodel %s has already been aligned." % sp_octx.name())
                    continue

                if not io.file_exists(aligned_recon):
                    log.ODM_WARNING("Submodel %s does not have an aligned reconstruction (%s). "
                                    "This could mean that the submodel could not be reconstructed "
                                    "(are there enough features to reconstruct it?). Skipping." % (sp_octx.name(), aligned_recon))
                    remove_paths.append(sp)
                    continue

                if io.file_exists(main_recon):
                    shutil.move(main_recon, unaligned_recon)

                shutil.move(aligned_recon, main_recon)
                log.ODM_INFO("%s is now %s" % (aligned_recon, main_recon))

            # Remove invalid submodels
            submodel_paths = [p for p in submodel_paths if p not in remove_paths]

            # Run the ODM toolchain for each submodel
            if local_workflow:
                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    log.ODM_INFO("========================")
                    log.ODM_INFO("Processing %s" % sp_octx.name())
                    log.ODM_INFO("========================")

                    argv = get_submodel_argv(args, tree.submodels_path, sp_octx.name())

                    # Re-run the ODM toolchain on the submodel
                    system.run(" ".join(map(quote, map(str, argv))), env_vars=os.environ.copy())
            else:
                # lre was created during the reconstruction phase above; it is only
                # defined (and only needed) when a cluster address was provided
                lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                lre.run_toolchain()

            # Restore max_concurrency value
            args.max_concurrency = orig_max_concurrency

            octx.touch(split_done_file)
        else:
            log.ODM_WARNING('Found a split done file in: %s' % split_done_file)
    else:
        log.ODM_INFO("Normal dataset, will process all at once.")

    self.progress = 0.0
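
# For reference, get_submodel_argv() (defined elsewhere in ODM) rewrites the original
# command line so it targets a submodel project instead of the parent. The command
# assembled and run above looks roughly like the following (illustrative only; exact
# flags depend on the original invocation, and split-related options are not carried
# over, so each submodel is processed as a normal, non-split dataset):
#
#   python3 run.py <original options> --project-path /path/to/submodels submodel_0000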