def configure(self, p, _i, _o):
    """Build the project's ODM_Tree and publish it as an ecto constant.

    When timing (--time) is requested and a benchmarking file from a
    previous run exists, it is reset and re-created with a fresh header.
    """
    project_tree = types.ODM_Tree(p.args.project_path, p.args.images, p.args.gcp)
    self.tree = ecto.Constant(value=project_tree)

    # TODO(dakota) put this somewhere better maybe
    wants_benchmarking = p.args.time and io.file_exists(project_tree.benchmarking)
    if wants_benchmarking:
        # Start from a clean slate: drop the previous run's benchmark log.
        os.remove(project_tree.benchmarking)
        header = 'ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores)
        with open(project_tree.benchmarking, 'a') as bench_file:
            bench_file.write(header)
def process(self, args, outputs):
    """Load the input dataset: discover images (and masks), build the photo
    list, georeference the reconstruction and store results in `outputs`.

    Populates outputs['start_time'], outputs['tree'] and
    outputs['reconstruction'].  Exits the process when no usable images
    are found.
    """
    outputs['start_time'] = system.now_raw()
    tree = types.ODM_Tree(args.project_path, args.gcp, args.geo)
    outputs['tree'] = tree

    if args.time and io.file_exists(tree.benchmarking):
        # Delete the previously made file
        os.remove(tree.benchmarking)
        with open(tree.benchmarking, 'a') as b:
            b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))

    # check if the image filename is supported
    def valid_image_filename(filename):
        (pathfn, ext) = os.path.splitext(filename)
        # "_mask" suffixed files are companion masks, not photos
        return ext.lower() in context.supported_extensions and pathfn[-5:] != "_mask"

    # Get supported images from dir
    def get_images(in_dir):
        # Returns (valid, rejects); rejects may still contain mask files
        log.ODM_DEBUG(in_dir)
        entries = os.listdir(in_dir)
        valid, rejects = [], []
        for f in entries:
            if valid_image_filename(f):
                valid.append(f)
            else:
                rejects.append(f)
        return valid, rejects

    def find_mask(photo_path, masks):
        # Look up a "<basename>_mask" companion file; returns None when absent
        (pathfn, ext) = os.path.splitext(os.path.basename(photo_path))
        k = "{}_mask".format(pathfn)
        mask = masks.get(k)
        if mask:
            # Spaces are not supported due to OpenSfM's mask_list.txt format reqs
            if not " " in mask:
                return mask
            else:
                log.ODM_WARNING("Image mask {} has a space. Spaces are currently not supported for image masks.".format(mask))

    # get images directory
    images_dir = tree.dataset_raw

    # define paths and create working directories
    system.mkdir_p(tree.odm_georeferencing)

    log.ODM_INFO('Loading dataset from: %s' % images_dir)

    # check if we rerun cell or not
    images_database_file = os.path.join(tree.root_path, 'images.json')
    if not io.file_exists(images_database_file) or self.rerun():
        if not os.path.exists(images_dir):
            log.ODM_ERROR("There are no images in %s! Make sure that your project path and dataset name is correct. The current is set to: %s" % (images_dir, args.project_path))
            exit(1)

        files, rejects = get_images(images_dir)
        if files:
            # create ODMPhoto list
            path_files = [os.path.join(images_dir, f) for f in files]

            # Lookup table for masks
            masks = {}
            for r in rejects:
                (p, ext) = os.path.splitext(r)
                if p[-5:] == "_mask" and ext.lower() in context.supported_extensions:
                    masks[p] = r

            photos = []
            with open(tree.dataset_list, 'w') as dataset_list:
                log.ODM_INFO("Loading %s images" % len(path_files))
                for f in path_files:
                    p = types.ODM_Photo(f)
                    p.set_mask(find_mask(f, masks))
                    photos += [p]
                    dataset_list.write(photos[-1].filename + '\n')

            # Check if a geo file is available
            if tree.odm_geo_file is not None and os.path.exists(tree.odm_geo_file):
                log.ODM_INFO("Found image geolocation file")
                gf = GeoFile(tree.odm_geo_file)
                updated = 0
                for p in photos:
                    entry = gf.get_entry(p.filename)
                    if entry:
                        p.update_with_geo_entry(entry)
                        updated += 1
                log.ODM_INFO("Updated %s image positions" % updated)

            # Save image database for faster restart
            save_images_database(photos, images_database_file)
        else:
            log.ODM_ERROR('Not enough supported images in %s' % images_dir)
            exit(1)
    else:
        # We have an images database, just load it
        photos = load_images_database(images_database_file)

    log.ODM_INFO('Found %s usable images' % len(photos))

    # Create reconstruction object
    reconstruction = types.ODM_Reconstruction(photos)

    # GCPs take precedence over EXIF GPS unless --use-exif was requested
    if tree.odm_georeferencing_gcp and not args.use_exif:
        reconstruction.georeference_with_gcp(tree.odm_georeferencing_gcp,
                                             tree.odm_georeferencing_coords,
                                             tree.odm_georeferencing_gcp_utm,
                                             tree.odm_georeferencing_model_txt_geo,
                                             rerun=self.rerun())
    else:
        reconstruction.georeference_with_gps(tree.dataset_raw,
                                             tree.odm_georeferencing_coords,
                                             tree.odm_georeferencing_model_txt_geo,
                                             rerun=self.rerun())

    reconstruction.save_proj_srs(os.path.join(tree.odm_georeferencing,
                                              tree.odm_georeferencing_proj))
    outputs['reconstruction'] = reconstruction
def stitch(self, connector, resource):
    """Run the OpenDroneMap worker as a subprocess and relay its output.

    Writes the current settings to settings.yaml, launches worker.py with
    ODM_SETTINGS pointing at it, then pumps the child's stdout into a log
    file while classifying each line ([ERROR]/[WARNING]/[INFO]/plain) and
    forwarding errors to the connector as status updates.
    """
    self.logger.debug('Initializing OpenDroneMap app - %s' % system.now())

    # If user asks to rerun everything, delete all of the existing progress directories.
    # TODO: Move this somewhere it's not hard-coded. Alternatively remove everything
    # we don't create
    if self.opendrone_args.rerun_all:
        # NOTE(review): project_path is concatenated without a separator —
        # presumably it already ends with '/'; confirm against callers.
        os.system("rm -rf " + self.opendrone_args.project_path + "images_resize/ " +
                  self.opendrone_args.project_path + "odm_georeferencing/ " +
                  self.opendrone_args.project_path + "odm_meshing/ " +
                  self.opendrone_args.project_path + "odm_orthophoto/ " +
                  self.opendrone_args.project_path + "odm_texturing/ " +
                  self.opendrone_args.project_path + "opensfm/ " +
                  self.opendrone_args.project_path + "pmvs/")

    # create an instance of my App BlackBox
    # internally configure all tasks
    connector.status_update(StatusMessage.processing, resource, "Creating ODMApp.")
    settingsfilepath = os.path.join(self.opendrone_args.project_path, "settings.yaml")
    with open(settingsfilepath, 'w') as out_f:
        # Dump every non-None argument as "key : value" (simple YAML-ish format)
        odm_args = vars(self.opendrone_args)
        for key in odm_args:
            if not odm_args[key] is None:
                out_f.write(str(key) + " : " + str(odm_args[key]) + "\n")

    proc = None
    try:
        my_env = os.environ.copy()
        my_env["ODM_SETTINGS"] = settingsfilepath
        my_path = os.path.dirname(os.path.realpath(__file__))
        if not my_path:
            my_path = "."
        script_path = os.path.join(my_path, "worker.py")
        # stderr is merged into stdout so one reader loop sees everything
        proc = subprocess.Popen([script_path, "code"], bufsize=-1, env=my_env,
                                stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                encoding="UTF-8")
    except Exception as ex:
        connector.status_update(StatusMessage.processing, resource, "Exception: " + str(ex))
        self.logger.exception("Error running process.")

    logfilepath = os.path.join(self.opendrone_args.project_path, self.args.logfilename)
    # Unbuffered binary log file (buffering=0) so partial output survives a crash
    with open(logfilepath, 'wb', 0) as logfile:
        if proc:
            # Loop here processing the output until the proc finishes
            self.logger.debug("Waiting for process to finish")
            connector.status_update(StatusMessage.processing, resource,
                                    "Waiting for process to finish")
            while proc.returncode is None:
                if not proc.stdout is None:
                    try:
                        while True:
                            line = proc.stdout.readline()
                            if line:
                                logfile.write(line.encode('utf-8'))
                                line = line.rstrip('\n')
                                # Classify the line by its severity tag and
                                # route it to the matching logger level.
                                if "[ERROR]" in line:
                                    connector.status_update(StatusMessage.processing, resource,
                                                            line.replace("[ERROR] ", ""))
                                    line = line.replace("[ERROR] ", "")
                                    self.logger.error(line)
                                elif " ERROR: " in line:
                                    connector.status_update(StatusMessage.processing, resource,
                                                            re.sub(r".* ERROR: ", "ERROR: ", line))
                                    line = re.sub(r".* ERROR: ", "ERROR: ", line)
                                    self.logger.error(line)
                                elif "[WARNING]" in line:
                                    line = line.replace("[WARNING] ", "")
                                    self.logger.warning(line)
                                elif " WARNING: " in line:
                                    line = re.sub(r".* WARNING: ", "", line)
                                    self.logger.warning(line)
                                elif "[INFO]" in line:
                                    line = line.replace("[INFO] ", "")
                                    self.logger.info(line)
                                elif " INFO: " in line:
                                    line = re.sub(r".* INFO: ", "", line)
                                    self.logger.info(line)
                                else:
                                    self.logger.debug(line)
                            else:
                                # EOF on stdout: refresh returncode and recheck
                                proc.poll()
                                break
                    except Exception as ex:
                        self.logger.exception("Error reading line.")
                        connector.status_update(StatusMessage.processing, resource,
                                                "Ignoring exception while waiting: " + str(ex))
                # Sleep and try again for process to complete
                time.sleep(1)
            self.logger.debug("Return code: " + str(proc.returncode))
            connector.status_update(StatusMessage.processing, resource,
                                    "Return code: " + str(proc.returncode))
            if proc.returncode != 0 and self.args.waitonerror:
                # Deliberate hang for debugging: keep the container alive
                # after a failure so logs/state can be inspected.
                connector.status_update(StatusMessage.processing, resource,
                                        "Bad return code, hanging out until killed")
                while True:
                    connector.status_update(StatusMessage.processing, resource,
                                            "Sleeping for 1000 seconds")
                    time.sleep(1000)

    connector.status_update(StatusMessage.processing, resource, "OpenDroneMap app finished.")
    self.logger.debug('OpenDroneMap app finished - %s' % system.now())
    return
from opendm import log from opendm import config from opendm import system from opendm import io import ecto import os from scripts.odm_app import ODMApp if __name__ == '__main__': args = config.config() log.ODM_INFO('Initializing OpenDroneMap app - %s' % system.now()) # Add project dir if doesn't exist args.project_path = io.join_paths(args.project_path, args.name) if not io.dir_exists(args.project_path): log.ODM_WARNING('Directory %s does not exist. Creating it now.' % args.name) system.mkdir_p(os.path.abspath(args.project_path)) # If user asks to rerun everything, delete all of the existing progress directories. # TODO: Move this somewhere it's not hard-coded if args.rerun_all: log.ODM_DEBUG("Rerun all -- Removing old data") os.system("rm -rf " + args.project_path + "/images_resize " + args.project_path + "/odm_georeferencing " + args.project_path + "/odm_meshing " + args.project_path +
def process(self, args, outputs):
    """Load the input dataset and prepare the reconstruction.

    Discovers images and companion "_mask" files, builds the ODM_Photo
    list (skipping corrupted files), applies geo-file / GPS-DOP /
    camera-lens overrides, optionally generates AI sky masks, then
    georeferences the reconstruction.

    Populates outputs['start_time'], outputs['tree'],
    outputs['reconstruction'] and, when --boundary is usable,
    outputs['boundary'].  Raises system.ExitException when no usable
    images are found.
    """
    outputs['start_time'] = system.now_raw()
    tree = types.ODM_Tree(args.project_path, args.gcp, args.geo)
    outputs['tree'] = tree

    if args.time and io.file_exists(tree.benchmarking):
        # Delete the previously made file
        os.remove(tree.benchmarking)
        with open(tree.benchmarking, 'a') as b:
            b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))

    # check if the image filename is supported
    def valid_image_filename(filename):
        (pathfn, ext) = os.path.splitext(filename)
        # "_mask"-suffixed files are companion masks, not photos
        return ext.lower() in context.supported_extensions and pathfn[-5:] != "_mask"

    # Get supported images from dir
    def get_images(in_dir):
        # Returns (valid, rejects); rejects may still contain mask files
        log.ODM_DEBUG(in_dir)
        entries = os.listdir(in_dir)
        valid, rejects = [], []
        for f in entries:
            if valid_image_filename(f):
                valid.append(f)
            else:
                rejects.append(f)
        return valid, rejects

    def find_mask(photo_path, masks):
        # Look up the "<basename>_mask" companion for a photo; None when absent
        (pathfn, ext) = os.path.splitext(os.path.basename(photo_path))
        k = "{}_mask".format(pathfn)
        mask = masks.get(k)
        if mask:
            # Spaces are not supported due to OpenSfM's mask_list.txt format reqs
            if not " " in mask:
                return mask
            else:
                log.ODM_WARNING("Image mask {} has a space. Spaces are currently not supported for image masks.".format(mask))

    # get images directory
    images_dir = tree.dataset_raw

    # define paths and create working directories
    system.mkdir_p(tree.odm_georeferencing)

    log.ODM_INFO('Loading dataset from: %s' % images_dir)

    # check if we rerun cell or not
    images_database_file = os.path.join(tree.root_path, 'images.json')
    if not io.file_exists(images_database_file) or self.rerun():
        if not os.path.exists(images_dir):
            raise system.ExitException("There are no images in %s! Make sure that your project path and dataset name is correct. The current is set to: %s" % (images_dir, args.project_path))

        files, rejects = get_images(images_dir)
        if files:
            # create ODMPhoto list
            path_files = [os.path.join(images_dir, f) for f in files]

            # Lookup table for masks
            masks = {}
            for r in rejects:
                (p, ext) = os.path.splitext(r)
                if p[-5:] == "_mask" and ext.lower() in context.supported_extensions:
                    masks[p] = r

            photos = []
            with open(tree.dataset_list, 'w') as dataset_list:
                log.ODM_INFO("Loading %s images" % len(path_files))
                for f in path_files:
                    try:
                        p = types.ODM_Photo(f)
                        p.set_mask(find_mask(f, masks))
                        photos.append(p)
                        dataset_list.write(photos[-1].filename + '\n')
                    except PhotoCorruptedException:
                        log.ODM_WARNING("%s seems corrupted and will not be used" % os.path.basename(f))

            # Check if a geo file is available
            if tree.odm_geo_file is not None and os.path.isfile(tree.odm_geo_file):
                log.ODM_INFO("Found image geolocation file")
                gf = GeoFile(tree.odm_geo_file)
                updated = 0
                for p in photos:
                    entry = gf.get_entry(p.filename)
                    if entry:
                        p.update_with_geo_entry(entry)
                        p.compute_opk()
                        updated += 1
                log.ODM_INFO("Updated %s image positions" % updated)

            # GPSDOP override if we have GPS accuracy information (such as RTK)
            if 'gps_accuracy_is_set' in args:
                log.ODM_INFO("Forcing GPS DOP to %s for all images" % args.gps_accuracy)
                for p in photos:
                    p.override_gps_dop(args.gps_accuracy)

            # Override projection type
            if args.camera_lens != "auto":
                log.ODM_INFO("Setting camera lens to %s for all images" % args.camera_lens)
                for p in photos:
                    p.override_camera_projection(args.camera_lens)

            # Automatic sky removal
            if args.sky_removal:
                # For each image that :
                # - Doesn't already have a mask, AND
                # - Is not nadir (or if orientation info is missing), AND
                # - There are no spaces in the image filename (OpenSfM requirement)
                # Automatically generate a sky mask

                # Generate list of sky images
                sky_images = []
                for p in photos:
                    if p.mask is None and (p.pitch is None or (abs(p.pitch) > 20)) and (not " " in p.filename):
                        sky_images.append({'file': os.path.join(images_dir, p.filename), 'p': p})

                if len(sky_images) > 0:
                    log.ODM_INFO("Automatically generating sky masks for %s images" % len(sky_images))
                    model = ai.get_model("skyremoval", "https://github.com/OpenDroneMap/SkyRemoval/releases/download/v1.0.5/model.zip", "v1.0.5")
                    if model is not None:
                        sf = SkyFilter(model=model)

                        def parallel_sky_filter(item):
                            try:
                                mask_file = sf.run_img(item['file'], images_dir)

                                # Check and set
                                if mask_file is not None and os.path.isfile(mask_file):
                                    item['p'].set_mask(os.path.basename(mask_file))
                                    log.ODM_INFO("Wrote %s" % os.path.basename(mask_file))
                                else:
                                    # BUGFIX: previously referenced undefined name `img`
                                    # (NameError on the failure path); use the item's file.
                                    log.ODM_WARNING("Cannot generate mask for %s" % item['file'])
                            except Exception as e:
                                # BUGFIX: same undefined-name issue in the handler.
                                log.ODM_WARNING("Cannot generate mask for %s: %s" % (item['file'], str(e)))

                        parallel_map(parallel_sky_filter, sky_images, max_workers=args.max_concurrency)

                        log.ODM_INFO("Sky masks generation completed!")
                    else:
                        log.ODM_WARNING("Cannot load AI model (you might need to be connected to the internet?)")
                else:
                    log.ODM_INFO("No sky masks will be generated (masks already provided, or images are nadir)")

            # End sky removal

            # Save image database for faster restart
            save_images_database(photos, images_database_file)
        else:
            raise system.ExitException('Not enough supported images in %s' % images_dir)
    else:
        # We have an images database, just load it
        photos = load_images_database(images_database_file)

    log.ODM_INFO('Found %s usable images' % len(photos))
    log.logger.log_json_images(len(photos))

    # Create reconstruction object
    reconstruction = types.ODM_Reconstruction(photos)

    # GCPs take precedence over EXIF GPS unless --use-exif was requested
    if tree.odm_georeferencing_gcp and not args.use_exif:
        reconstruction.georeference_with_gcp(tree.odm_georeferencing_gcp,
                                             tree.odm_georeferencing_coords,
                                             tree.odm_georeferencing_gcp_utm,
                                             tree.odm_georeferencing_model_txt_geo,
                                             rerun=self.rerun())
    else:
        reconstruction.georeference_with_gps(tree.dataset_raw,
                                             tree.odm_georeferencing_coords,
                                             tree.odm_georeferencing_model_txt_geo,
                                             rerun=self.rerun())

    reconstruction.save_proj_srs(os.path.join(tree.odm_georeferencing,
                                              tree.odm_georeferencing_proj))
    outputs['reconstruction'] = reconstruction

    # Try to load boundaries
    if args.boundary:
        if reconstruction.is_georeferenced():
            outputs['boundary'] = boundary.load_boundary(args.boundary, reconstruction.get_proj_srs())
        else:
            args.boundary = None
            log.ODM_WARNING("Reconstruction is not georeferenced, but boundary file provided (will ignore boundary file)")

    # If sfm-algorithm is triangulation, check if photos have OPK
    if args.sfm_algorithm == 'triangulation':
        for p in photos:
            if not p.has_opk():
                log.ODM_WARNING("No omega/phi/kappa angles found in input photos (%s), switching sfm-algorithm to incremental" % p.filename)
                args.sfm_algorithm = 'incremental'
                break
from opendm import log from opendm import config from opendm import system from opendm import io import ecto import os from scripts.odm_app import ODMApp if __name__ == '__main__': args = config.config() log.ODM_INFO('Initializing OpenDroneMap app - %s' % system.now()) # Add project dir if doesn't exist args.project_path = io.join_paths(args.project_path, args.name) if not io.dir_exists(args.project_path): log.ODM_WARNING('Directory %s does not exist. Creating it now.' % args.name) system.mkdir_p(os.path.abspath(args.project_path)) # If user asks to rerun everything, delete all of the existing progress directories. # TODO: Move this somewhere it's not hard-coded if args.rerun_all: os.system("rm -rf " + args.project_path + "images_resize/ " + args.project_path + "odm_georeferencing/ " + args.project_path + "odm_meshing/ " + args.project_path + "odm_orthophoto/ "
f.write(ccd_size) short_name = '\t\t\t\t<ShortName> {} </ShortName>\n'.format( camera_model) f.write(short_name) f.write('\t\t\t</CameraEntry>\n') f.write('\t\t</LocCamDataBase>\n') f.write('\t</ChantierDescripteur>\n') f.write('</Global>\n') # RUN if __name__ == '__main__': args = config.config() log.MM_INFO('Initializing NodeMICMAC app - %s' % system.now()) log.MM_INFO(args) progressbc.set_project_name(args.name) project_dir = io.join_paths(args.project_path, args.name) image_dir = io.join_paths(project_dir, 'images') IN_DOCKER = os.environ.get('DEBIAN_FRONTEND', False) if IN_DOCKER: mm3d = 'mm3d' else: mm3d = '/home/drnmppr-micmac/bin/mm3d' # for dev: locally installed micmac branch try:
def usage():
    """Log command-line usage to the ODM error log, then terminate with status 0."""
    program_name = sys.argv[0]
    log.ODM_ERROR('USAGE: %s --project-path [project_path]' % program_name)
    log.ODM_ERROR('OpenDroneMap app finished - %s' % system.now())
    sys.exit(0)
from opendm import log from opendm import config from opendm import system from opendm import io from opendm.progress import progressbc from opendm.utils import double_quote, get_processing_results_paths import os from stages.odm_app import ODMApp if __name__ == '__main__': args = config.config() log.ODM_INFO('Initializing ODM - %s' % system.now()) # Print args args_dict = vars(args) log.ODM_INFO('==============') for k in sorted(args_dict.keys()): # Skip _is_set keys if k.endswith("_is_set"): continue # Don't leak token if k == 'sm_cluster' and args_dict[k] is not None: log.ODM_INFO('%s: True' % k) else: log.ODM_INFO('%s: %s' % (k, args_dict[k])) log.ODM_INFO('==============')
def process(self, args, outputs):
    """Load the dataset for this pipeline run (ecto-cell-era variant).

    Copies images from the input directory into the project when needed,
    builds the ODM_Photo list, constructs an ODM_Reconstruction (from GCP
    file, extracted UTM coordinates, or an explicit 'proj' parameter) and
    stores it in outputs.  Exits the process when no usable images exist.
    """
    # Load tree
    tree = types.ODM_Tree(args.project_path, args.images, args.gcp)
    outputs['tree'] = tree

    if args.time and io.file_exists(tree.benchmarking):
        # Delete the previously made file
        os.remove(tree.benchmarking)
        with open(tree.benchmarking, 'a') as b:
            b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))

    # check if the extension is supported
    def supported_extension(file_name):
        (pathfn, ext) = os.path.splitext(file_name)
        return ext.lower() in context.supported_extensions

    # Get supported images from dir
    def get_images(in_dir):
        # filter images for its extension type
        log.ODM_DEBUG(in_dir)
        return [f for f in io.get_files_list(in_dir) if supported_extension(f)]

    # get images directory
    input_dir = tree.input_images
    images_dir = tree.dataset_raw
    if not io.dir_exists(images_dir):
        log.ODM_INFO("Project directory %s doesn't exist. Creating it now. " % images_dir)
        system.mkdir_p(images_dir)
        # Copy supported images from the input dir into the project dataset
        copied = [copyfile(io.join_paths(input_dir, f),
                           io.join_paths(images_dir, f)) for f in get_images(input_dir)]

    # define paths and create working directories
    system.mkdir_p(tree.odm_georeferencing)
    if not args.use_3dmesh:
        system.mkdir_p(tree.odm_25dgeoreferencing)

    log.ODM_DEBUG('Loading dataset from: %s' % images_dir)

    # check if we rerun cell or not
    images_database_file = io.join_paths(tree.root_path, 'images.json')
    if not io.file_exists(images_database_file) or self.rerun():
        files = get_images(images_dir)
        if files:
            # create ODMPhoto list
            path_files = [io.join_paths(images_dir, f) for f in files]
            photos = []
            with open(tree.dataset_list, 'w') as dataset_list:
                for f in path_files:
                    photos += [types.ODM_Photo(f)]
                    dataset_list.write(photos[-1].filename + '\n')

            # Save image database for faster restart
            save_images_database(photos, images_database_file)
        else:
            log.ODM_ERROR('Not enough supported images in %s' % images_dir)
            exit(1)
    else:
        # We have an images database, just load it
        photos = load_images_database(images_database_file)

    log.ODM_INFO('Found %s usable images' % len(photos))

    # append photos to cell output
    if not self.params.get('proj'):
        if tree.odm_georeferencing_gcp:
            outputs['reconstruction'] = types.ODM_Reconstruction(photos, coords_file=tree.odm_georeferencing_gcp)
        else:
            # Generate UTM from images
            try:
                if not io.file_exists(tree.odm_georeferencing_coords) or self.rerun():
                    location.extract_utm_coords(photos, tree.dataset_raw, tree.odm_georeferencing_coords)
                else:
                    log.ODM_INFO("Coordinates file already exist: %s" % tree.odm_georeferencing_coords)
            except:
                # Best-effort: a GCP file may supply coordinates instead
                log.ODM_WARNING('Could not generate coordinates file. '
                                'Ignore if there is a GCP file')

            outputs['reconstruction'] = types.ODM_Reconstruction(photos, coords_file=tree.odm_georeferencing_coords)
    else:
        outputs['reconstruction'] = types.ODM_Reconstruction(photos, projstring=self.params.get('proj'))

    # Save proj to file for future use (unless this
    # dataset is not georeferenced)
    if outputs['reconstruction'].projection:
        with open(io.join_paths(tree.odm_georeferencing, tree.odm_georeferencing_proj), 'w') as f:
            f.write(outputs['reconstruction'].projection.srs)
def process(self, args, outputs):
    """Load the dataset (mid-era variant): copy/discover images, build the
    photo list, georeference the reconstruction and store it in outputs.

    Exits the process when no usable images are found.
    """
    tree = types.ODM_Tree(args.project_path, args.gcp)
    outputs['tree'] = tree

    if args.time and io.file_exists(tree.benchmarking):
        # Delete the previously made file
        os.remove(tree.benchmarking)
        with open(tree.benchmarking, 'a') as b:
            b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))

    # check if the extension is supported
    def supported_extension(file_name):
        (pathfn, ext) = os.path.splitext(file_name)
        return ext.lower() in context.supported_extensions

    # Get supported images from dir
    def get_images(in_dir):
        # filter images for its extension type
        log.ODM_DEBUG(in_dir)
        return [f for f in io.get_files_list(in_dir) if supported_extension(f)]

    # get images directory
    input_dir = tree.input_images
    images_dir = tree.dataset_raw
    if not io.dir_exists(images_dir):
        log.ODM_INFO("Project directory %s doesn't exist. Creating it now. " % images_dir)
        system.mkdir_p(images_dir)
        # Copy supported images from the input dir into the project dataset
        copied = [copyfile(io.join_paths(input_dir, f),
                           io.join_paths(images_dir, f)) for f in get_images(input_dir)]

    # define paths and create working directories
    system.mkdir_p(tree.odm_georeferencing)
    if not args.use_3dmesh:
        system.mkdir_p(tree.odm_25dgeoreferencing)

    log.ODM_INFO('Loading dataset from: %s' % images_dir)

    # check if we rerun cell or not
    images_database_file = io.join_paths(tree.root_path, 'images.json')
    if not io.file_exists(images_database_file) or self.rerun():
        files = get_images(images_dir)
        if files:
            # create ODMPhoto list
            path_files = [io.join_paths(images_dir, f) for f in files]
            photos = []
            with open(tree.dataset_list, 'w') as dataset_list:
                log.ODM_INFO("Loading %s images" % len(path_files))
                for f in path_files:
                    photos += [types.ODM_Photo(f)]
                    dataset_list.write(photos[-1].filename + '\n')

            # Save image database for faster restart
            save_images_database(photos, images_database_file)
        else:
            log.ODM_ERROR('Not enough supported images in %s' % images_dir)
            exit(1)
    else:
        # We have an images database, just load it
        photos = load_images_database(images_database_file)

    log.ODM_INFO('Found %s usable images' % len(photos))

    # Create reconstruction object
    reconstruction = types.ODM_Reconstruction(photos)

    # GCPs take precedence over EXIF GPS unless --use-exif was requested
    if tree.odm_georeferencing_gcp and not args.use_exif:
        reconstruction.georeference_with_gcp(tree.odm_georeferencing_gcp,
                                             tree.odm_georeferencing_coords,
                                             tree.odm_georeferencing_gcp_utm,
                                             rerun=self.rerun())
    else:
        reconstruction.georeference_with_gps(tree.dataset_raw,
                                             tree.odm_georeferencing_coords,
                                             rerun=self.rerun())

    reconstruction.save_proj_srs(io.join_paths(tree.odm_georeferencing,
                                               tree.odm_georeferencing_proj))
    outputs['reconstruction'] = reconstruction
def process(self, args, outputs):
    """Load the input dataset (pre-sky-removal variant).

    Discovers images and companion "_mask" files, builds the ODM_Photo
    list (skipping corrupted files), applies geo-file / GPS-DOP /
    camera-lens overrides, then georeferences the reconstruction.

    Populates outputs['start_time'], outputs['tree'],
    outputs['reconstruction'] and, when --boundary is usable,
    outputs['boundary'].  Raises system.ExitException when no usable
    images are found.
    """
    outputs['start_time'] = system.now_raw()
    tree = types.ODM_Tree(args.project_path, args.gcp, args.geo)
    outputs['tree'] = tree

    if args.time and io.file_exists(tree.benchmarking):
        # Delete the previously made file
        os.remove(tree.benchmarking)
        with open(tree.benchmarking, 'a') as b:
            b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))

    # check if the image filename is supported
    def valid_image_filename(filename):
        (pathfn, ext) = os.path.splitext(filename)
        # "_mask"-suffixed files are companion masks, not photos
        return ext.lower() in context.supported_extensions and pathfn[-5:] != "_mask"

    # Get supported images from dir
    def get_images(in_dir):
        # Returns (valid, rejects); rejects may still contain mask files
        log.ODM_DEBUG(in_dir)
        entries = os.listdir(in_dir)
        valid, rejects = [], []
        for f in entries:
            if valid_image_filename(f):
                valid.append(f)
            else:
                rejects.append(f)
        return valid, rejects

    def find_mask(photo_path, masks):
        # Look up the "<basename>_mask" companion for a photo; None when absent
        (pathfn, ext) = os.path.splitext(os.path.basename(photo_path))
        k = "{}_mask".format(pathfn)
        mask = masks.get(k)
        if mask:
            # Spaces are not supported due to OpenSfM's mask_list.txt format reqs
            if not " " in mask:
                return mask
            else:
                log.ODM_WARNING("Image mask {} has a space. Spaces are currently not supported for image masks.".format(mask))

    # get images directory
    images_dir = tree.dataset_raw

    # define paths and create working directories
    system.mkdir_p(tree.odm_georeferencing)

    log.ODM_INFO('Loading dataset from: %s' % images_dir)

    # check if we rerun cell or not
    images_database_file = os.path.join(tree.root_path, 'images.json')
    if not io.file_exists(images_database_file) or self.rerun():
        if not os.path.exists(images_dir):
            raise system.ExitException("There are no images in %s! Make sure that your project path and dataset name is correct. The current is set to: %s" % (images_dir, args.project_path))

        files, rejects = get_images(images_dir)
        if files:
            # create ODMPhoto list
            path_files = [os.path.join(images_dir, f) for f in files]

            # Lookup table for masks
            masks = {}
            for r in rejects:
                (p, ext) = os.path.splitext(r)
                if p[-5:] == "_mask" and ext.lower() in context.supported_extensions:
                    masks[p] = r

            photos = []
            with open(tree.dataset_list, 'w') as dataset_list:
                log.ODM_INFO("Loading %s images" % len(path_files))
                for f in path_files:
                    try:
                        p = types.ODM_Photo(f)
                        p.set_mask(find_mask(f, masks))
                        photos += [p]
                        dataset_list.write(photos[-1].filename + '\n')
                    except PhotoCorruptedException:
                        log.ODM_WARNING("%s seems corrupted and will not be used" % os.path.basename(f))

            # Check if a geo file is available
            if tree.odm_geo_file is not None and os.path.isfile(tree.odm_geo_file):
                log.ODM_INFO("Found image geolocation file")
                gf = GeoFile(tree.odm_geo_file)
                updated = 0
                for p in photos:
                    entry = gf.get_entry(p.filename)
                    if entry:
                        p.update_with_geo_entry(entry)
                        p.compute_opk()
                        updated += 1
                log.ODM_INFO("Updated %s image positions" % updated)

            # GPSDOP override if we have GPS accuracy information (such as RTK)
            if 'gps_accuracy_is_set' in args:
                log.ODM_INFO("Forcing GPS DOP to %s for all images" % args.gps_accuracy)
                for p in photos:
                    p.override_gps_dop(args.gps_accuracy)

            # Override projection type
            if args.camera_lens != "auto":
                log.ODM_INFO("Setting camera lens to %s for all images" % args.camera_lens)
                for p in photos:
                    p.override_camera_projection(args.camera_lens)

            # Save image database for faster restart
            save_images_database(photos, images_database_file)
        else:
            raise system.ExitException('Not enough supported images in %s' % images_dir)
    else:
        # We have an images database, just load it
        photos = load_images_database(images_database_file)

    log.ODM_INFO('Found %s usable images' % len(photos))
    log.logger.log_json_images(len(photos))

    # Create reconstruction object
    reconstruction = types.ODM_Reconstruction(photos)

    # GCPs take precedence over EXIF GPS unless --use-exif was requested
    if tree.odm_georeferencing_gcp and not args.use_exif:
        reconstruction.georeference_with_gcp(tree.odm_georeferencing_gcp,
                                             tree.odm_georeferencing_coords,
                                             tree.odm_georeferencing_gcp_utm,
                                             tree.odm_georeferencing_model_txt_geo,
                                             rerun=self.rerun())
    else:
        reconstruction.georeference_with_gps(tree.dataset_raw,
                                             tree.odm_georeferencing_coords,
                                             tree.odm_georeferencing_model_txt_geo,
                                             rerun=self.rerun())

    reconstruction.save_proj_srs(os.path.join(tree.odm_georeferencing,
                                              tree.odm_georeferencing_proj))
    outputs['reconstruction'] = reconstruction

    # Try to load boundaries
    if args.boundary:
        if reconstruction.is_georeferenced():
            outputs['boundary'] = boundary.load_boundary(args.boundary, reconstruction.get_proj_srs())
        else:
            args.boundary = None
            log.ODM_WARNING("Reconstruction is not georeferenced, but boundary file provided (will ignore boundary file)")

    # If sfm-algorithm is triangulation, check if photos have OPK
    if args.sfm_algorithm == 'triangulation':
        for p in photos:
            if not p.has_opk():
                log.ODM_WARNING("No omega/phi/kappa angles found in input photos (%s), switching sfm-algorithm to incremental" % p.filename)
                args.sfm_algorithm = 'incremental'
                break
def configure(self, p, _i, _o):
    """Create the project ODM_Tree and wrap it in an ecto constant; when
    --time is set, replace any stale benchmarking file with a fresh header."""
    self.tree = ecto.Constant(value=types.ODM_Tree(p.args.project_path, p.args.images))
    tree = self.tree.outputs.value

    # TODO(dakota) put this somewhere better maybe
    if not (p.args.time and io.file_exists(tree.benchmarking)):
        return

    # Remove the stale benchmark log before writing the new header.
    os.remove(tree.benchmarking)
    with open(tree.benchmarking, 'a') as bench:
        bench.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))
from stages.odm_app import ODMApp def odm_version(): try: with open("VERSION") as f: return f.read().split("\n")[0].strip() except: return "?" if __name__ == '__main__': args = config.config() log.ODM_INFO('Initializing ODM %s - %s' % (odm_version(), system.now())) # Print args args_dict = args_to_dict(args) log.ODM_INFO('==============') for k in args_dict.keys(): log.ODM_INFO('%s: %s' % (k, args_dict[k])) log.ODM_INFO('==============') progressbc.set_project_name(args.name) # Add project dir if doesn't exist args.project_path = os.path.join(args.project_path, args.name) if not io.dir_exists(args.project_path): log.ODM_WARNING('Directory %s does not exist. Creating it now.' % args.name)
from opendm import config from opendm import system from opendm import io from opendm.progress import progressbc from stages.dataset import DatasetStage from stages.colmap import FeaturesStage, MatchingStage, SparseStage, GeoregisterStage, DenseStage from stages.mesh import MeshStage from stages.mvstex import TextureStage from stages.dem import DEMStage from stages.ortho import OrthoStage from stages.georeferencing import GeoreferencingStage if __name__ == '__main__': args = config.config() log.ODM_INFO('Initializing NodeCM app - %s' % system.now()) log.ODM_INFO(args) if args.rerun_all: log.ODM_INFO("Rerun all -- Removing old data") os.system("rm -rf " + " ".join([ quote(os.path.join(args.project_path, "dense")), quote(os.path.join(args.project_path, "database.db")), quote(os.path.join(args.project_path, "odm_dem")), quote(os.path.join(args.project_path, "odm_georeferencing")), quote(os.path.join(args.project_path, "odm_meshing")), quote(os.path.join(args.project_path, "odm_texturing")), quote(os.path.join(args.project_path, "entwine_pointcloud")), quote(os.path.join(args.project_path, "odm_orthophoto")), ]))