# Assumed context for the snippets below (not shown in the original): the
# standard-library modules imported here, plus the mapillary_tools internal
# helper modules `processing` and `uploader`, `tqdm` for progress bars, and
# helper names such as `print_error`, `compute_bearing`, `gps_distance`,
# `gps_speed`, `diff_bearing`, `get_import_meta_properties_exif`,
# `finalize_import_properties_process`, `finalize_sequence_processing`,
# `MAX_SEQUENCE_LENGTH` and `MAX_CAPTURE_SPEED`.
import os
import sys
import time
import uuid

from tqdm import tqdm


def process_user_properties(import_path,
                            user_name,
                            organization_username=None,
                            organization_key=None,
                            private=False,
                            master_upload=False,
                            verbose=False,
                            rerun=False,
                            skip_subfolders=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "user_process", rerun, verbose, skip_subfolders)
    if not len(process_file_list):
        print("No images to run user process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if not user_name:
        print("Error, must provide a valid user name, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "user_process", "failed",
                                                  verbose)
        return
    if private and not organization_username and not organization_key:
        print("Error, if the import belongs to a private repository, you "
              "need to provide a valid organization user name or key to "
              "which the private repository belongs, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "user_process", "failed",
                                                  verbose)
        return

    # function calls
    if not master_upload:
        user_properties = processing.user_properties(
            user_name, import_path, process_file_list, organization_username,
            organization_key, private, verbose)
    else:
        user_properties = processing.user_properties_master(
            user_name, import_path, process_file_list, organization_key,
            private, verbose)

    # write data and logs
    processing.create_and_log_process_in_list(process_file_list,
                                              "user_process", "success",
                                              verbose, user_properties)
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "mapillary_image_description", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run process finalization")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    for image in process_file_list:
        # check the processing logs
        log_root = uploader.log_rootpath(import_path, image)
        duplicate_path = os.path.join(log_root, "duplicate")
        if os.path.isfile(duplicate_path):
            continue
        final_mapillary_image_description = \
            processing.get_final_mapillary_image_description(
                log_root, image, master_upload, verbose, skip_EXIF_insert)
        processing.create_and_log_process(image, import_path,
                                          "mapillary_image_description",
                                          "success",
                                          final_mapillary_image_description,
                                          verbose=verbose)
def process_import_meta_properties(import_path,
                                   orientation=None,
                                   device_make=None,
                                   device_model=None,
                                   GPS_accuracy=None,
                                   add_file_name=False,
                                   add_import_date=False,
                                   verbose=False,
                                   rerun=False,
                                   skip_subfolders=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "import_meta_data_process", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run import meta data process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # map orientation from degrees to tags
    if orientation:
        orientation = processing.format_orientation(orientation)

    # read import meta from image EXIF and finalize the import
    # properties process
    for image in process_file_list:
        import_meta_data_properties = get_import_meta_properties_exif(
            image, verbose)
        finalize_import_properties_process(
            image, import_path, orientation, device_make, device_model,
            GPS_accuracy, add_file_name, add_import_date, verbose,
            import_meta_data_properties)
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False,
                   keep_original=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "mapillary_image_description", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run process finalization")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    for image in process_file_list:
        # check the processing logs
        log_root = uploader.log_rootpath(image)
        duplicate_path = os.path.join(log_root, "duplicate")
        if os.path.isfile(duplicate_path):
            continue
        final_mapillary_image_description = \
            processing.get_final_mapillary_image_description(
                log_root, image, master_upload, verbose, skip_EXIF_insert,
                keep_original)
        processing.create_and_log_process(image,
                                          "mapillary_image_description",
                                          "success",
                                          final_mapillary_image_description,
                                          verbose=verbose)
def process_geotag_properties(import_path,
                              geotag_source="exif",
                              geotag_source_path=None,
                              offset_time=0.0,
                              offset_angle=0.0,
                              local_time=False,
                              sub_second_interval=0.0,
                              use_gps_start_time=False,
                              verbose=False,
                              rerun=False,
                              skip_subfolders=False,
                              video_file=None):
    # sanity check if video file is passed
    if video_file and not (os.path.isdir(video_file) or
                           os.path.isfile(video_file)):
        print("Error, video path " + video_file +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_file:
        # set sampling path
        video_sampling_path = processing.sampled_video_frames_rootpath(
            video_file)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.dirname(video_file), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "geotag_process", rerun, verbose, skip_subfolders)
    if not len(process_file_list):
        print("No images to run geotag process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if geotag_source_path is None and geotag_source != "exif":
        # if geotagging from external log file, path to the external log file
        # needs to be provided, if not, exit
        print("Error, if geotagging from external log, rather than image "
              "EXIF, you need to provide the full path to the log file.")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "geotag_process", "failed",
                                                  verbose)
        sys.exit(1)
    elif geotag_source != "exif" and not os.path.isfile(geotag_source_path) \
            and not os.path.isdir(geotag_source_path):
        print("Error, " + geotag_source_path +
              " file source of gps/time properties does not exist. If "
              "geotagging from external log, rather than image EXIF, you "
              "need to provide the full path to the log file.")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "geotag_process", "failed",
                                                  verbose)
        sys.exit(1)

    # function calls
    if geotag_source == "exif":
        geotag_properties = processing.geotag_from_exif(
            process_file_list, import_path, offset_angle, verbose)
    elif geotag_source == "gpx" or geotag_source == "nmea":
        geotag_properties = processing.geotag_from_gps_trace(
            process_file_list, import_path, geotag_source,
            geotag_source_path, offset_time, offset_angle, local_time,
            sub_second_interval, use_gps_start_time, verbose)
    elif geotag_source == "csv":
        geotag_properties = processing.geotag_from_csv(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, verbose)
    elif geotag_source == "gopro_video":
        geotag_properties = processing.geotag_from_gopro_video(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, local_time, sub_second_interval,
            use_gps_start_time, verbose)
    elif geotag_source == "blackvue_videos":
        geotag_properties = processing.geotag_from_blackvue_video(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, local_time, sub_second_interval,
            use_gps_start_time, verbose)
    elif geotag_source == "json":
        geotag_properties = processing.geotag_from_json(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, verbose)
    print("Sub process finished")
def process_upload_params(import_path,
                          user_name,
                          master_upload=False,
                          verbose=False,
                          rerun=False,
                          skip_subfolders=False,
                          video_import_path=None):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and \
            not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "upload_params_process", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run upload params process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if not user_name:
        print_error("Error, must provide a valid user name, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "upload_params_process",
                                                  "failed", verbose)
        sys.exit(1)

    if not master_upload:
        try:
            credentials = uploader.authenticate_user(user_name)
        except Exception:
            print_error("Error, user authentication failed for user " +
                        user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      "upload_params_process",
                                                      "failed", verbose)
            sys.exit(1)
        if credentials is None or "user_upload_token" not in credentials or \
                "user_permission_hash" not in credentials or \
                "user_signature_hash" not in credentials:
            print_error("Error, user authentication failed for user " +
                        user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      "upload_params_process",
                                                      "failed", verbose)
            sys.exit(1)
        user_upload_token = credentials["user_upload_token"]
        user_permission_hash = credentials["user_permission_hash"]
        user_signature_hash = credentials["user_signature_hash"]
        user_key = credentials["MAPSettingsUserKey"]

    for image in tqdm(process_file_list,
                      desc="Processing image upload parameters"):
        # check the status of the sequence processing
        log_root = uploader.log_rootpath(image)
        duplicate_flag_path = os.path.join(log_root, "duplicate")
        upload_params_path = os.path.join(log_root,
                                          "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            os.remove(upload_params_path)
        if os.path.isfile(duplicate_flag_path) or master_upload:
            continue
        upload_params_properties = processing.get_upload_param_properties(
            log_root, image, user_name, user_upload_token,
            user_permission_hash, user_signature_hash, user_key, verbose)
        processing.create_and_log_process(image, "upload_params_process",
                                          "success",
                                          upload_params_properties,
                                          verbose=verbose)
        # flag manual upload
        log_manual_upload = os.path.join(log_root, "manual_upload")
        open(log_manual_upload, 'a').close()
    print("Sub process ended")
def process_upload_params(import_path,
                          user_name,
                          master_upload=False,
                          verbose=False,
                          rerun=False,
                          skip_subfolders=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "upload_params_process", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run upload params process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if not user_name:
        print("Error, must provide a valid user name, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "upload_params_process",
                                                  "failed", verbose)
        return

    if not master_upload:
        try:
            credentials = uploader.authenticate_user(user_name)
        except Exception:
            print("Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      "upload_params_process",
                                                      "failed", verbose)
            return
        if credentials is None or "user_upload_token" not in credentials or \
                "user_permission_hash" not in credentials or \
                "user_signature_hash" not in credentials:
            print("Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      "upload_params_process",
                                                      "failed", verbose)
            return
        user_upload_token = credentials["user_upload_token"]
        user_permission_hash = credentials["user_permission_hash"]
        user_signature_hash = credentials["user_signature_hash"]
        user_key = credentials["MAPSettingsUserKey"]

    for image in process_file_list:
        # check the status of the sequence processing
        log_root = uploader.log_rootpath(image)
        duplicate_flag_path = os.path.join(log_root, "duplicate")
        upload_params_path = os.path.join(log_root,
                                          "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            os.remove(upload_params_path)
        if os.path.isfile(duplicate_flag_path) or master_upload:
            continue
        upload_params_properties = processing.get_upload_param_properties(
            log_root, image, user_name, user_upload_token,
            user_permission_hash, user_signature_hash, user_key, verbose)
        processing.create_and_log_process(image, "upload_params_process",
                                          "success",
                                          upload_params_properties,
                                          verbose=verbose)
        # flag manual upload
        log_manual_upload = os.path.join(log_root, "manual_upload")
        open(log_manual_upload, 'a').close()
def process_import_meta_properties(import_path,
                                   orientation=None,
                                   device_make=None,
                                   device_model=None,
                                   GPS_accuracy=None,
                                   add_file_name=False,
                                   add_import_date=False,
                                   verbose=False,
                                   rerun=False,
                                   skip_subfolders=False,
                                   video_file=None,
                                   custom_meta_data=None,
                                   camera_uuid=None):
    # sanity check if video file is passed
    if video_file and not (os.path.isdir(video_file) or
                           os.path.isfile(video_file)):
        print("Error, video path " + video_file +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_file:
        # set sampling path
        video_sampling_path = processing.sampled_video_frames_rootpath(
            video_file)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.dirname(video_file), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "import_meta_data_process", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run import meta data process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # map orientation from degrees to tags
    if orientation:
        orientation = processing.format_orientation(orientation)

    # read import meta from image EXIF and finalize the import
    # properties process
    progress_count = 0
    for image in process_file_list:
        progress_count += 1
        if verbose:
            if (progress_count % 50) == 0:
                sys.stdout.write(".")
            if (progress_count % 5000) == 0:
                print("")
        import_meta_data_properties = get_import_meta_properties_exif(
            image, verbose)
        finalize_import_properties_process(
            image, import_path, orientation, device_make, device_model,
            GPS_accuracy, add_file_name, add_import_date, verbose,
            import_meta_data_properties, custom_meta_data, camera_uuid)
    print("Sub process finished")
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False,
                   keep_original=False,
                   video_import_path=None,
                   overwrite_all_EXIF_tags=False,
                   overwrite_EXIF_time_tag=False,
                   overwrite_EXIF_gps_tag=False,
                   overwrite_EXIF_direction_tag=False,
                   overwrite_EXIF_orientation_tag=False):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_import_path), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "mapillary_image_description", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run process finalization")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    for image in tqdm(
            process_file_list,
            desc="Inserting mapillary image description in image EXIF"):
        # check the processing logs
        log_root = uploader.log_rootpath(image)
        duplicate_path = os.path.join(log_root, "duplicate")
        if os.path.isfile(duplicate_path):
            continue
        final_mapillary_image_description = \
            processing.get_final_mapillary_image_description(
                log_root, image, master_upload, verbose, skip_EXIF_insert,
                keep_original, overwrite_all_EXIF_tags,
                overwrite_EXIF_time_tag, overwrite_EXIF_gps_tag,
                overwrite_EXIF_direction_tag, overwrite_EXIF_orientation_tag)
        processing.create_and_log_process(image,
                                          "mapillary_image_description",
                                          "success",
                                          final_mapillary_image_description,
                                          verbose=verbose)
    print("Sub process ended")
def process_user_properties(import_path,
                            user_name,
                            organization_username=None,
                            organization_key=None,
                            private=False,
                            master_upload=False,
                            verbose=False,
                            rerun=False,
                            skip_subfolders=False,
                            video_import_path=None):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and \
            not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "user_process", rerun, verbose, skip_subfolders)
    if not len(process_file_list):
        print("No images to run user process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if not user_name:
        print_error("Error, must provide a valid user name, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "user_process", "failed",
                                                  verbose)
        sys.exit(1)
    if private and not organization_username and not organization_key:
        print_error("Error, if the import belongs to a private repository, "
                    "you need to provide a valid organization user name or "
                    "key to which the private repository belongs, "
                    "exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "user_process", "failed",
                                                  verbose)
        sys.exit(1)

    # function calls
    if not master_upload:
        user_properties = processing.user_properties(
            user_name, import_path, process_file_list, organization_username,
            organization_key, private, verbose)
    else:
        user_properties = processing.user_properties_master(
            user_name, import_path, process_file_list, organization_key,
            private, verbose)

    # write data and logs
    processing.create_and_log_process_in_list(process_file_list,
                                              "user_process", "success",
                                              verbose, user_properties)
    print("Sub process ended")
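# Hedged usage sketch for the variant above (not part of the source; path,
# user name and organization key are hypothetical):
#
#   process_user_properties("path/to/images", "my_mapillary_user",
#                           organization_key="abc123", private=True,
#                           verbose=True)
#
# would tag every unprocessed image under path/to/images with the user and
# organization properties needed for a private-repository upload.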
def process_upload_params(import_path,
                          user_name,
                          master_upload=False,
                          verbose=False,
                          rerun=False,
                          skip_subfolders=False,
                          video_import_path=None):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and \
            not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "upload_params_process", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run upload params process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if not user_name:
        print_error("Error, must provide a valid user name, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "upload_params_process",
                                                  "failed", verbose)
        sys.exit(1)

    if not master_upload:
        try:
            credentials = uploader.authenticate_user(user_name)
        except Exception:
            print_error("Error, user authentication failed for user " +
                        user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      "upload_params_process",
                                                      "failed", verbose)
            sys.exit(1)
        if credentials is None or "user_upload_token" not in credentials or \
                "user_permission_hash" not in credentials or \
                "user_signature_hash" not in credentials:
            print_error("Error, user authentication failed for user " +
                        user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      "upload_params_process",
                                                      "failed", verbose)
            sys.exit(1)
        user_upload_token = credentials["user_upload_token"]
        user_permission_hash = credentials["user_permission_hash"]
        user_signature_hash = credentials["user_signature_hash"]
        user_key = credentials["MAPSettingsUserKey"]

    for image in tqdm(process_file_list,
                      desc="Processing image upload parameters"):
        # check the status of the sequence processing
        log_root = uploader.log_rootpath(image)
        duplicate_flag_path = os.path.join(log_root, "duplicate")
        upload_params_path = os.path.join(log_root,
                                          "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            os.remove(upload_params_path)
        if os.path.isfile(duplicate_flag_path) or master_upload:
            continue
        upload_params_properties = processing.get_upload_param_properties(
            log_root, image, user_name, user_upload_token,
            user_permission_hash, user_signature_hash, user_key, verbose)
        processing.create_and_log_process(image, "upload_params_process",
                                          "success",
                                          upload_params_properties,
                                          verbose=verbose)
        # flag manual upload
        log_manual_upload = os.path.join(log_root, "manual_upload")
        open(log_manual_upload, 'a').close()
    print("Sub process ended")
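# Hedged usage sketch (hypothetical path and user name): authenticate the
# user and write per-image upload parameters, skipping images already
# flagged as duplicates:
#
#   process_upload_params("path/to/images", "my_mapillary_user",
#                         verbose=True)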
def process_upload_params(import_path,
                          user_name,
                          master_upload=False,
                          verbose=False,
                          rerun=False,
                          skip_subfolders=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "upload_params_process", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run upload params process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if not user_name:
        print("Error, must provide a valid user name, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  import_path,
                                                  "upload_params_process",
                                                  "failed", verbose)
        return

    if not master_upload:
        try:
            credentials = uploader.authenticate_user(user_name)
        except Exception:
            print("Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      import_path,
                                                      "upload_params_process",
                                                      "failed", verbose)
            return
        if credentials is None or "user_upload_token" not in credentials or \
                "user_permission_hash" not in credentials or \
                "user_signature_hash" not in credentials:
            print("Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      import_path,
                                                      "upload_params_process",
                                                      "failed", verbose)
            return
        user_upload_token = credentials["user_upload_token"]
        user_permission_hash = credentials["user_permission_hash"]
        user_signature_hash = credentials["user_signature_hash"]
        user_key = credentials["MAPSettingsUserKey"]

    for image in process_file_list:
        # check the status of the sequence processing
        log_root = uploader.log_rootpath(import_path, image)
        duplicate_flag_path = os.path.join(log_root, "duplicate")
        upload_params_path = os.path.join(log_root,
                                          "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            os.remove(upload_params_path)
        if os.path.isfile(duplicate_flag_path) or master_upload:
            continue
        upload_params_properties = processing.get_upload_param_properties(
            log_root, image, user_name, user_upload_token,
            user_permission_hash, user_signature_hash, user_key, verbose)
        processing.create_and_log_process(image, import_path,
                                          "upload_params_process", "success",
                                          upload_params_properties,
                                          verbose=verbose)
        # flag manual upload
        log_manual_upload = os.path.join(log_root, "manual_upload")
        open(log_manual_upload, 'a').close()
def process_geotag_properties(import_path,
                              geotag_source="exif",
                              geotag_source_path=None,
                              offset_time=0.0,
                              offset_angle=0.0,
                              local_time=False,
                              sub_second_interval=0.0,
                              use_gps_start_time=False,
                              verbose=False,
                              rerun=False,
                              skip_subfolders=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "geotag_process", rerun, verbose, skip_subfolders)
    if not len(process_file_list):
        print("No images to run geotag process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if geotag_source_path is None and geotag_source != "exif":
        # if geotagging from external log file, path to the external log file
        # needs to be provided, if not, exit
        print("Error, if geotagging from external log, rather than image "
              "EXIF, you need to provide the full path to the log file.")
        processing.create_and_log_process_in_list(process_file_list,
                                                  import_path,
                                                  "geotag_process", "failed",
                                                  verbose)
        return
    elif geotag_source != "exif" and not os.path.isfile(geotag_source_path):
        print("Error, " + geotag_source_path +
              " file source of gps/time properties does not exist. If "
              "geotagging from external log, rather than image EXIF, you "
              "need to provide the full path to the log file.")
        processing.create_and_log_process_in_list(process_file_list,
                                                  import_path,
                                                  "geotag_process", "failed",
                                                  verbose)
        return

    # function calls
    if geotag_source == "exif":
        geotag_properties = processing.geotag_from_exif(
            process_file_list, import_path, offset_angle, verbose)
    elif geotag_source == "gpx":
        geotag_properties = processing.geotag_from_gpx(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, local_time, sub_second_interval,
            use_gps_start_time, verbose)
    elif geotag_source == "csv":
        geotag_properties = processing.geotag_from_csv(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, verbose)
    elif geotag_source == "gopro_video":
        geotag_properties = processing.geotag_from_gopro_video(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, local_time, sub_second_interval,
            use_gps_start_time, verbose)
    elif geotag_source == "json":
        geotag_properties = processing.geotag_from_json(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, verbose)
def process_geotag_properties(import_path,
                              geotag_source="exif",
                              geotag_source_path=None,
                              offset_time=0.0,
                              offset_angle=0.0,
                              local_time=False,
                              sub_second_interval=0.0,
                              use_gps_start_time=False,
                              verbose=False,
                              rerun=False,
                              skip_subfolders=False,
                              video_import_path=None):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and \
            not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "geotag_process", rerun, verbose, skip_subfolders)
    if not len(process_file_list):
        print("No images to run geotag process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if geotag_source_path is None and geotag_source != "exif":
        # if geotagging from external log file, path to the external log file
        # needs to be provided, if not, exit
        print_error("Error, if geotagging from external log, rather than "
                    "image EXIF, you need to provide the full path to the "
                    "log file.")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "geotag_process", "failed",
                                                  verbose)
        sys.exit(1)
    elif geotag_source != "exif" and not os.path.isfile(geotag_source_path) \
            and not os.path.isdir(geotag_source_path):
        print_error("Error, " + geotag_source_path +
                    " file source of gps/time properties does not exist. If "
                    "geotagging from external log, rather than image EXIF, "
                    "you need to provide the full path to the log file.")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "geotag_process", "failed",
                                                  verbose)
        sys.exit(1)

    # function calls
    if geotag_source == "exif":
        geotag_properties = processing.geotag_from_exif(
            process_file_list, import_path, offset_time, offset_angle,
            verbose)
    elif geotag_source == "gpx" or geotag_source == "nmea":
        geotag_properties = processing.geotag_from_gps_trace(
            process_file_list, geotag_source, geotag_source_path,
            offset_time, offset_angle, local_time, sub_second_interval,
            use_gps_start_time, verbose)
    elif geotag_source == "gopro_videos":
        geotag_properties = processing.geotag_from_gopro_video(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, local_time, sub_second_interval,
            use_gps_start_time, verbose)
    elif geotag_source == "blackvue_videos":
        geotag_properties = processing.geotag_from_blackvue_video(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, local_time, sub_second_interval,
            use_gps_start_time, verbose)
    print("Sub process ended")
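# Hedged usage sketch for the variant above (hypothetical paths): geotag
# images from an external GPX trace instead of image EXIF, shifting capture
# times by 1.5 seconds to align them with the trace:
#
#   process_geotag_properties("path/to/images", geotag_source="gpx",
#                             geotag_source_path="path/to/trace.gpx",
#                             offset_time=1.5, use_gps_start_time=True,
#                             verbose=True)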
def process_geotag_properties(import_path,
                              geotag_source="exif",
                              geotag_source_path=None,
                              offset_time=0.0,
                              offset_angle=0.0,
                              local_time=False,
                              sub_second_interval=0.0,
                              use_gps_start_time=False,
                              verbose=False,
                              rerun=False,
                              skip_subfolders=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "geotag_process", rerun, verbose, skip_subfolders)
    if not len(process_file_list):
        print("No images to run geotag process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if geotag_source_path is None and geotag_source != "exif":
        # if geotagging from external log file, path to the external log file
        # needs to be provided, if not, exit
        print("Error, if geotagging from external log, rather than image "
              "EXIF, you need to provide the full path to the log file.")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "geotag_process", "failed",
                                                  verbose)
        return
    elif geotag_source != "exif" and not os.path.isfile(geotag_source_path):
        print("Error, " + geotag_source_path +
              " file source of gps/time properties does not exist. If "
              "geotagging from external log, rather than image EXIF, you "
              "need to provide the full path to the log file.")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "geotag_process", "failed",
                                                  verbose)
        return

    # function calls
    if geotag_source == "exif":
        geotag_properties = processing.geotag_from_exif(
            process_file_list, import_path, offset_angle, verbose)
    elif geotag_source == "gpx" or geotag_source == "nmea":
        geotag_properties = processing.geotag_from_gps_trace(
            process_file_list, import_path, geotag_source,
            geotag_source_path, offset_time, offset_angle, local_time,
            sub_second_interval, use_gps_start_time, verbose)
    elif geotag_source == "csv":
        geotag_properties = processing.geotag_from_csv(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, verbose)
    elif geotag_source == "gopro_video":
        geotag_properties = processing.geotag_from_gopro_video(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, local_time, sub_second_interval,
            use_gps_start_time, verbose)
    elif geotag_source == "json":
        geotag_properties = processing.geotag_from_json(
            process_file_list, import_path, geotag_source_path, offset_time,
            offset_angle, verbose)
def process_sequence_properties(import_path,
                                cutoff_distance=600.0,
                                cutoff_time=60.0,
                                interpolate_directions=False,
                                flag_duplicates=False,
                                duplicate_distance=0.1,
                                duplicate_angle=5,
                                offset_angle=0.0,
                                verbose=False,
                                rerun=False,
                                skip_subfolders=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    sequences = []
    if skip_subfolders:
        process_file_list = processing.get_process_file_list(
            import_path, "sequence_process", rerun, verbose, True,
            import_path)
        if not len(process_file_list):
            if verbose:
                print("No images to run sequence process in root " +
                      import_path)
                print("If the images have already been processed and not "
                      "yet uploaded, they can be processed again, by "
                      "passing the argument --rerun")
        else:
            # LOAD TIME AND GPS POINTS ------------------------------------
            file_list, capture_times, lats, lons, directions = \
                processing.load_geotag_points(process_file_list, import_path,
                                              verbose)
            # ---------------------------------------

            # SPLIT SEQUENCES --------------------------------------
            if len(capture_times) and len(lats) and len(lons):
                sequences.extend(
                    processing.split_sequences(capture_times, lats, lons,
                                               file_list, directions,
                                               cutoff_time, cutoff_distance,
                                               verbose))
            # ---------------------------------------
    else:
        # sequence limited to the root of the files
        for root, dirs, files in os.walk(import_path):
            if ".mapillary" in root:
                continue
            if len(files):
                process_file_list = processing.get_process_file_list(
                    import_path, "sequence_process", rerun, verbose, True,
                    root)
                if not len(process_file_list):
                    if verbose:
                        print("No images to run sequence process in root " +
                              root)
                        print("If the images have already been processed "
                              "and not yet uploaded, they can be processed "
                              "again, by passing the argument --rerun")
                    continue

                # LOAD TIME AND GPS POINTS ------------------------------------
                file_list, capture_times, lats, lons, directions = \
                    processing.load_geotag_points(process_file_list,
                                                  import_path, verbose)
                # ---------------------------------------

                # SPLIT SEQUENCES --------------------------------------
                if len(capture_times) and len(lats) and len(lons):
                    sequences.extend(
                        processing.split_sequences(capture_times, lats, lons,
                                                   file_list, directions,
                                                   cutoff_time,
                                                   cutoff_distance, verbose))
                # ---------------------------------------

    # process for each sequence
    for sequence in sequences:
        file_list = sequence["file_list"]
        directions = sequence["directions"]
        latlons = sequence["latlons"]
        capture_times = sequence["capture_times"]

        # COMPUTE DIRECTIONS --------------------------------------
        interpolated_directions = [
            compute_bearing(ll1[0], ll1[1], ll2[0], ll2[1])
            for ll1, ll2 in zip(latlons, latlons[1:])
        ]
        interpolated_directions.append(directions[-1])
        # use interpolated directions if direction not available, or if the
        # interpolate_directions flag is set
        for i, d in enumerate(directions):
            directions[i] = d if (
                d is not None and not interpolate_directions) else (
                    interpolated_directions[i] + offset_angle) % 360.0
        # ---------------------------------------

        # INTERPOLATE TIMESTAMPS, in case of identical timestamps
        capture_times, file_list = processing.interpolate_timestamp(
            capture_times, file_list)

        final_file_list = file_list[:]
        final_directions = directions[:]
        final_capture_times = capture_times[:]

        # FLAG DUPLICATES --------------------------------------
        if flag_duplicates:
            final_file_list = [file_list[0]]
            final_directions = [directions[0]]
            prev_latlon = latlons[0]
            prev_direction = directions[0]
            for i, filename in enumerate(file_list[1:]):
                log_root = uploader.log_rootpath(import_path, filename)
                duplicate_flag_path = os.path.join(log_root, "duplicate")
                sequence_process_success_path = os.path.join(
                    log_root, "sequence_process_success")
                k = i + 1
                distance = gps_distance(latlons[k], prev_latlon)
                if directions[k] is not None and prev_direction is not None:
                    direction_diff = diff_bearing(directions[k],
                                                  prev_direction)
                else:
                    # don't use bearing difference if no bearings are
                    # available
                    direction_diff = 360
                if distance < duplicate_distance and \
                        direction_diff < duplicate_angle:
                    open(duplicate_flag_path, "w").close()
                    open(sequence_process_success_path, "w").close()
                    open(
                        sequence_process_success_path + "_" +
                        str(time.strftime("%Y_%m_%d_%H_%M_%S",
                                          time.gmtime())), "w").close()
                else:
                    prev_latlon = latlons[k]
                    prev_direction = directions[k]
                    final_file_list.append(filename)
                    final_directions.append(directions[k])
        # ---------------------------------------

        # FINALIZE ------------------------------------
        for i in range(0, len(final_file_list), MAX_SEQUENCE_LENGTH):
            finalize_sequence_processing(
                str(uuid.uuid4()),
                final_file_list[i:i + MAX_SEQUENCE_LENGTH],
                final_directions[i:i + MAX_SEQUENCE_LENGTH],
                final_capture_times[i:i + MAX_SEQUENCE_LENGTH],
                import_path, verbose)
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False,
                   keep_original=False,
                   video_file=None):
    # sanity check if video file is passed
    if video_file and not (os.path.isdir(video_file) or
                           os.path.isfile(video_file)):
        print("Error, video path " + video_file +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_file:
        # set sampling path
        video_sampling_path = processing.sampled_video_frames_rootpath(
            video_file)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.dirname(video_file), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "mapillary_image_description", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run process finalization")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    progress_count = 0
    for image in process_file_list:
        progress_count += 1
        if verbose:
            if (progress_count % 50) == 0:
                sys.stdout.write(".")
            if (progress_count % 5000) == 0:
                print("")
        # check the processing logs
        log_root = uploader.log_rootpath(image)
        duplicate_path = os.path.join(log_root, "duplicate")
        if os.path.isfile(duplicate_path):
            continue
        final_mapillary_image_description = \
            processing.get_final_mapillary_image_description(
                log_root, image, master_upload, verbose, skip_EXIF_insert,
                keep_original)
        processing.create_and_log_process(image,
                                          "mapillary_image_description",
                                          "success",
                                          final_mapillary_image_description,
                                          verbose=verbose)
    print("Sub process finished")
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False,
                   keep_original=False,
                   video_import_path=None,
                   overwrite_all_EXIF_tags=False,
                   overwrite_EXIF_time_tag=False,
                   overwrite_EXIF_gps_tag=False,
                   overwrite_EXIF_direction_tag=False,
                   overwrite_EXIF_orientation_tag=False):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and \
            not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "mapillary_image_description", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run process finalization")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    for image in tqdm(
            process_file_list,
            desc="Inserting mapillary image description in image EXIF"):
        # check the processing logs
        log_root = uploader.log_rootpath(image)
        duplicate_path = os.path.join(log_root, "duplicate")
        if os.path.isfile(duplicate_path):
            continue
        final_mapillary_image_description = \
            processing.get_final_mapillary_image_description(
                log_root, image, master_upload, verbose, skip_EXIF_insert,
                keep_original, overwrite_all_EXIF_tags,
                overwrite_EXIF_time_tag, overwrite_EXIF_gps_tag,
                overwrite_EXIF_direction_tag, overwrite_EXIF_orientation_tag)
        processing.create_and_log_process(image,
                                          "mapillary_image_description",
                                          "success",
                                          final_mapillary_image_description,
                                          verbose=verbose)
    print("Sub process ended")
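# Hedged usage sketch for the variant above (hypothetical path): finalize
# processing and write the accumulated description into each image's EXIF,
# overwriting only the existing GPS tags:
#
#   insert_MAPJson("path/to/images", overwrite_EXIF_gps_tag=True,
#                  verbose=True)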
def process_import_meta_properties(import_path,
                                   orientation=None,
                                   device_make=None,
                                   device_model=None,
                                   GPS_accuracy=None,
                                   add_file_name=False,
                                   add_import_date=False,
                                   verbose=False,
                                   rerun=False,
                                   skip_subfolders=False,
                                   video_import_path=None,
                                   custom_meta_data=None,
                                   camera_uuid=None,
                                   windows_path=False,
                                   exclude_import_path=False,
                                   exclude_path=None):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and \
            not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "import_meta_data_process", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run import meta data process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # map orientation from degrees to tags
    if orientation:
        orientation = processing.format_orientation(orientation)

    # read import meta from image EXIF and finalize the import
    # properties process
    for image in tqdm(process_file_list,
                      desc="Processing image import properties"):
        import_meta_data_properties = get_import_meta_properties_exif(
            image, verbose)
        finalize_import_properties_process(
            image, import_path, orientation, device_make, device_model,
            GPS_accuracy, add_file_name, add_import_date, verbose,
            import_meta_data_properties, custom_meta_data, camera_uuid,
            windows_path, exclude_import_path, exclude_path)
    print("Sub process ended")
def process_import_meta_properties(import_path,
                                   orientation=None,
                                   device_make=None,
                                   device_model=None,
                                   GPS_accuracy=None,
                                   add_file_name=False,
                                   add_import_date=False,
                                   verbose=False,
                                   rerun=False,
                                   skip_subfolders=False,
                                   video_import_path=None,
                                   custom_meta_data=None,
                                   camera_uuid=None,
                                   windows_path=False,
                                   exclude_import_path=False,
                                   exclude_path=None):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and \
            not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "import_meta_data_process", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run import meta data process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # map orientation from degrees to tags
    if orientation is not None:
        orientation = processing.format_orientation(orientation)

    # read import meta from image EXIF and finalize the import
    # properties process
    for image in tqdm(process_file_list,
                      desc="Processing image import properties"):
        import_meta_data_properties = get_import_meta_properties_exif(
            image, verbose)
        finalize_import_properties_process(
            image, import_path, orientation, device_make, device_model,
            GPS_accuracy, add_file_name, add_import_date, verbose,
            import_meta_data_properties, custom_meta_data, camera_uuid,
            windows_path, exclude_import_path, exclude_path)
    print("Sub process ended")
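# Hedged usage sketch for the variant above (hypothetical values): attach
# device and capture metadata to every image before upload:
#
#   process_import_meta_properties("path/to/images", orientation=90,
#                                  device_make="GoPro",
#                                  device_model="HERO7 Black",
#                                  add_file_name=True, add_import_date=True)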
def process_sequence_properties(import_path,
                                cutoff_distance=600.0,
                                cutoff_time=60.0,
                                interpolate_directions=False,
                                keep_duplicates=False,
                                duplicate_distance=0.1,
                                duplicate_angle=5,
                                offset_angle=0.0,
                                verbose=False,
                                rerun=False,
                                skip_subfolders=False,
                                video_import_path=None):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and \
            not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    sequences = []
    if skip_subfolders:
        process_file_list = processing.get_process_file_list(
            import_path, "sequence_process", rerun, verbose, True,
            import_path)
        if not len(process_file_list):
            if verbose:
                print("No images to run sequence process in root " +
                      import_path)
                print("If the images have already been processed and not "
                      "yet uploaded, they can be processed again, by "
                      "passing the argument --rerun")
        else:
            # LOAD TIME AND GPS POINTS ------------------------------------
            file_list, capture_times, lats, lons, directions = \
                processing.load_geotag_points(process_file_list, verbose)
            # ---------------------------------------

            # SPLIT SEQUENCES --------------------------------------
            if len(capture_times) and len(lats) and len(lons):
                sequences.extend(
                    processing.split_sequences(capture_times, lats, lons,
                                               file_list, directions,
                                               cutoff_time, cutoff_distance,
                                               verbose))
            # ---------------------------------------
    else:
        # sequence limited to the root of the files
        for root, dirs, files in os.walk(import_path):
            if os.path.join(".mapillary", "logs") in root:
                continue
            if len(files):
                process_file_list = processing.get_process_file_list(
                    import_path, "sequence_process", rerun, verbose, True,
                    root)
                if not len(process_file_list):
                    if verbose:
                        print("No images to run sequence process in root " +
                              root)
                        print("If the images have already been processed "
                              "and not yet uploaded, they can be processed "
                              "again, by passing the argument --rerun")
                    continue

                # LOAD TIME AND GPS POINTS ------------------------------------
                file_list, capture_times, lats, lons, directions = \
                    processing.load_geotag_points(process_file_list, verbose)
                # ---------------------------------------

                # SPLIT SEQUENCES --------------------------------------
                if len(capture_times) and len(lats) and len(lons):
                    sequences.extend(
                        processing.split_sequences(capture_times, lats, lons,
                                                   file_list, directions,
                                                   cutoff_time,
                                                   cutoff_distance, verbose))
                # ---------------------------------------

    if not keep_duplicates:
        if verbose:
            print("Flagging images as duplicates if consecutive distance "
                  "difference less than {} and angle difference less "
                  "than {}".format(duplicate_distance, duplicate_angle))

    # process for each sequence
    for sequence in sequences:
        file_list = sequence["file_list"]
        directions = sequence["directions"]
        latlons = sequence["latlons"]
        capture_times = sequence["capture_times"]

        # COMPUTE DIRECTIONS --------------------------------------
        interpolated_directions = [
            compute_bearing(ll1[0], ll1[1], ll2[0], ll2[1])
            for ll1, ll2 in zip(latlons[:-1], latlons[1:])
        ]
        if len(interpolated_directions):
            interpolated_directions.append(interpolated_directions[-1])
        else:
            interpolated_directions.append(directions[-1])
        # use interpolated directions if direction not available, or if the
        # interpolate_directions flag is set
        for i, d in enumerate(directions):
            directions[i] = d if (
                d is not None and not interpolate_directions) else (
                    interpolated_directions[i] + offset_angle) % 360.0
        # ---------------------------------------

        # COMPUTE SPEED -------------------------------------------
        computed_delta_ts = [
            (t1 - t0).total_seconds()
            for t0, t1 in zip(capture_times[:-1], capture_times[1:])
        ]
        computed_distances = [
            gps_distance(l1, l0)
            for l0, l1 in zip(latlons[:-1], latlons[1:])
        ]
        computed_speed = gps_speed(computed_distances,
                                   computed_delta_ts)  # in meters/second
        if len([x for x in computed_speed if x > MAX_CAPTURE_SPEED]) > 0:
            print("Warning: The distance in sequence including images\n{}\n"
                  "to\n{}\nis too large for the time difference (very high "
                  "apparent capture speed). Are you sure timestamps and "
                  "locations are correct?".format(file_list[0],
                                                  file_list[-1]))

        # INTERPOLATE TIMESTAMPS, in case of identical timestamps
        capture_times = processing.interpolate_timestamp(capture_times)

        final_file_list = file_list[:]
        final_directions = directions[:]
        final_capture_times = capture_times[:]

        # FLAG DUPLICATES --------------------------------------
        if not keep_duplicates:
            final_file_list = [file_list[0]]
            final_directions = [directions[0]]
            final_capture_times = [capture_times[0]]
            prev_latlon = latlons[0]
            prev_direction = directions[0]
            for i, filename in enumerate(file_list[1:]):
                log_root = uploader.log_rootpath(filename)
                duplicate_flag_path = os.path.join(log_root, "duplicate")
                sequence_process_success_path = os.path.join(
                    log_root, "sequence_process_success")
                k = i + 1
                distance = gps_distance(latlons[k], prev_latlon)
                if directions[k] is not None and prev_direction is not None:
                    direction_diff = diff_bearing(directions[k],
                                                  prev_direction)
                else:
                    # don't use bearing difference if no bearings are
                    # available
                    direction_diff = 360
                if distance < duplicate_distance and \
                        direction_diff < duplicate_angle:
                    open(duplicate_flag_path, "w").close()
                    open(sequence_process_success_path, "w").close()
                    open(
                        sequence_process_success_path + "_" +
                        str(time.strftime("%Y_%m_%d_%H_%M_%S",
                                          time.gmtime())), "w").close()
                else:
                    prev_latlon = latlons[k]
                    prev_direction = directions[k]
                    final_file_list.append(filename)
                    final_directions.append(directions[k])
                    final_capture_times.append(capture_times[k])
        # ---------------------------------------

        # FINALIZE ------------------------------------
        for i in range(0, len(final_file_list), MAX_SEQUENCE_LENGTH):
            finalize_sequence_processing(
                str(uuid.uuid4()),
                final_file_list[i:i + MAX_SEQUENCE_LENGTH],
                final_directions[i:i + MAX_SEQUENCE_LENGTH],
                final_capture_times[i:i + MAX_SEQUENCE_LENGTH],
                import_path, verbose)
    print("Sub process ended")
def process_user_properties(import_path,
                            user_name,
                            organization_username=None,
                            organization_key=None,
                            private=False,
                            master_upload=False,
                            verbose=False,
                            rerun=False,
                            skip_subfolders=False,
                            video_file=None):
    # sanity check if video file is passed
    if video_file and not (os.path.isdir(video_file) or
                           os.path.isfile(video_file)):
        print("Error, video path " + video_file +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_file:
        # set sampling path
        video_sampling_path = processing.sampled_video_frames_rootpath(
            video_file)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.dirname(video_file), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "user_process", rerun, verbose, skip_subfolders)
    if not len(process_file_list):
        print("No images to run user process")
        print("If the images have already been processed and not yet "
              "uploaded, they can be processed again, by passing the "
              "argument --rerun")

    # sanity checks
    if not user_name:
        print("Error, must provide a valid user name, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "user_process", "failed",
                                                  verbose)
        sys.exit(1)
    if private and not organization_username and not organization_key:
        print("Error, if the import belongs to a private repository, you "
              "need to provide a valid organization user name or key to "
              "which the private repository belongs, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  "user_process", "failed",
                                                  verbose)
        sys.exit(1)

    # function calls
    if not master_upload:
        user_properties = processing.user_properties(
            user_name, import_path, process_file_list, organization_username,
            organization_key, private, verbose)
    else:
        user_properties = processing.user_properties_master(
            user_name, import_path, process_file_list, organization_key,
            private, verbose)

    # write data and logs
    processing.create_and_log_process_in_list(process_file_list,
                                              "user_process", "success",
                                              verbose, user_properties)
    print("Sub process ended")
def process_sequence_properties(import_path,
                                cutoff_distance=600.0,
                                cutoff_time=60.0,
                                interpolate_directions=False,
                                flag_duplicates=False,
                                duplicate_distance=0.1,
                                duplicate_angle=5,
                                offset_angle=0.0,
                                verbose=False,
                                rerun=False,
                                skip_subfolders=False,
                                video_import_path=None):
    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path):
        print("Error, video path " + video_import_path + " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) \
            if import_path else os.path.join(os.path.abspath(video_import_path), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path + " does not exist, exiting...")
        sys.exit(1)

    sequences = []
    if skip_subfolders:
        process_file_list = processing.get_process_file_list(import_path,
                                                             "sequence_process",
                                                             rerun,
                                                             verbose,
                                                             True,
                                                             import_path)
        if not len(process_file_list):
            if verbose:
                print("No images to run sequence process in root " + import_path)
                print("If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")
        else:
            # LOAD TIME AND GPS POINTS ------------------------------------
            file_list, capture_times, lats, lons, directions = processing.load_geotag_points(
                process_file_list, verbose)
            # ---------------------------------------

            # SPLIT SEQUENCES --------------------------------------
            if len(capture_times) and len(lats) and len(lons):
                sequences.extend(processing.split_sequences(
                    capture_times, lats, lons, file_list, directions,
                    cutoff_time, cutoff_distance, verbose))
            # ---------------------------------------
    else:
        # sequence limited to the root of the files
        for root, dirs, files in os.walk(import_path):
            if os.path.join(".mapillary", "logs") in root:
                continue
            if len(files):
                process_file_list = processing.get_process_file_list(import_path,
                                                                     "sequence_process",
                                                                     rerun,
                                                                     verbose,
                                                                     True,
                                                                     root)
                if not len(process_file_list):
                    if verbose:
                        print("No images to run sequence process in root " + root)
                        print("If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")
                    continue
                # LOAD TIME AND GPS POINTS ------------------------------------
                file_list, capture_times, lats, lons, directions = processing.load_geotag_points(
                    process_file_list, verbose)
                # ---------------------------------------

                # SPLIT SEQUENCES --------------------------------------
                if len(capture_times) and len(lats) and len(lons):
                    sequences.extend(processing.split_sequences(
                        capture_times, lats, lons, file_list, directions,
                        cutoff_time, cutoff_distance, verbose))
                # ---------------------------------------

    if flag_duplicates:
        if verbose:
            print("Flagging images as duplicates if consecutive distance difference less than {} and angle difference less than {}".format(
                duplicate_distance, duplicate_angle))

    # process for each sequence
    for sequence in sequences:
        file_list = sequence["file_list"]
        directions = sequence["directions"]
        latlons = sequence["latlons"]
        capture_times = sequence["capture_times"]

        # COMPUTE DIRECTIONS --------------------------------------
        interpolated_directions = [
            compute_bearing(ll1[0], ll1[1], ll2[0], ll2[1])
            for ll1, ll2 in zip(latlons[:-1], latlons[1:])
        ]
        if len(interpolated_directions):
            interpolated_directions.append(interpolated_directions[-1])
        else:
            interpolated_directions.append(directions[-1])

        # use interpolated directions if direction not available or if flag for
        # interpolate_directions
        for i, d in enumerate(directions):
            directions[i] = d if (
                d is not None and not interpolate_directions
            ) else (interpolated_directions[i] + offset_angle) % 360.0
        # ---------------------------------------

        # INTERPOLATE TIMESTAMPS, in case of identical timestamps
        capture_times = processing.interpolate_timestamp(capture_times)

        final_file_list = file_list[:]
        final_directions = directions[:]
        final_capture_times = capture_times[:]

        # FLAG DUPLICATES --------------------------------------
        if flag_duplicates:
            final_file_list = [file_list[0]]
            final_directions = [directions[0]]
            final_capture_times = [capture_times[0]]
            prev_latlon = latlons[0]
            prev_direction = directions[0]
            for i, filename in enumerate(file_list[1:]):
                log_root = uploader.log_rootpath(filename)
                duplicate_flag_path = os.path.join(log_root, "duplicate")
                sequence_process_success_path = os.path.join(
                    log_root, "sequence_process_success")
                k = i + 1
                distance = gps_distance(latlons[k], prev_latlon)
                if directions[k] is not None and prev_direction is not None:
                    direction_diff = diff_bearing(directions[k], prev_direction)
                else:
                    # don't use bearing difference if no bearings are available
                    direction_diff = 360
                if distance < duplicate_distance and direction_diff < duplicate_angle:
                    open(duplicate_flag_path, "w").close()
                    open(sequence_process_success_path, "w").close()
                    open(sequence_process_success_path + "_" +
                         str(time.strftime("%Y_%m_%d_%H_%M_%S", time.gmtime())),
                         "w").close()
                else:
                    prev_latlon = latlons[k]
                    prev_direction = directions[k]
                    final_file_list.append(filename)
                    final_directions.append(directions[k])
                    final_capture_times.append(capture_times[k])
        # ---------------------------------------

        # FINALIZE ------------------------------------
        for i in range(0, len(final_file_list), MAX_SEQUENCE_LENGTH):
            finalize_sequence_processing(str(uuid.uuid4()),
                                         final_file_list[i:i + MAX_SEQUENCE_LENGTH],
                                         final_directions[i:i + MAX_SEQUENCE_LENGTH],
                                         final_capture_times[i:i + MAX_SEQUENCE_LENGTH],
                                         import_path,
                                         verbose)
    print("Sub process ended")
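
# Illustrative sketch (not part of the tool): the duplicate check above flags an
# image when it moved less than duplicate_distance meters AND turned less than
# duplicate_angle degrees relative to the last kept image. The bearing
# difference has to wrap around 360; a hypothetical stand-in for diff_bearing:
def _example_is_duplicate(distance, bearing, prev_bearing,
                          duplicate_distance=0.1, duplicate_angle=5):
    if bearing is None or prev_bearing is None:
        # no bearings available -> never flag on angle, mirroring the
        # direction_diff = 360 fallback above
        return False
    direction_diff = abs(bearing - prev_bearing) % 360.0
    direction_diff = min(direction_diff, 360.0 - direction_diff)
    return distance < duplicate_distance and direction_diff < duplicate_angle

# e.g. _example_is_duplicate(0.05, 359.0, 1.0) -> True
#      (moved 5 cm, turned 2 degrees: below both thresholds)
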
def process_user_properties(import_path,
                            user_name,
                            organization_username=None,
                            organization_key=None,
                            private=False,
                            master_upload=False,
                            verbose=False,
                            rerun=False,
                            skip_subfolders=False):
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path + " does not exist, exiting...")
        sys.exit()

    # get list of files to process
    process_file_list = processing.get_process_file_list(import_path,
                                                         "user_process",
                                                         rerun,
                                                         verbose,
                                                         skip_subfolders)
    if not len(process_file_list):
        print("No images to run user process")
        print("If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")

    # sanity checks
    if not user_name:
        print("Error, must provide a valid user name, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  import_path,
                                                  "user_process",
                                                  "failed",
                                                  verbose)
        return

    if private and not organization_username and not organization_key:
        print("Error, if the import belongs to a private repository, you need to provide a valid organization user name or key to which the private repository belongs to, exiting...")
        processing.create_and_log_process_in_list(process_file_list,
                                                  import_path,
                                                  "user_process",
                                                  "failed",
                                                  verbose)
        return

    # function calls
    if not master_upload:
        user_properties = processing.user_properties(user_name,
                                                     import_path,
                                                     process_file_list,
                                                     organization_username,
                                                     organization_key,
                                                     private,
                                                     verbose)
    else:
        user_properties = processing.user_properties_master(user_name,
                                                            import_path,
                                                            process_file_list,
                                                            organization_key,
                                                            private,
                                                            verbose)

    # write data and logs
    processing.create_and_log_process_in_list(process_file_list,
                                              import_path,
                                              "user_process",
                                              "success",
                                              verbose,
                                              user_properties)
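
# Usage sketch (hypothetical paths and account names): a typical call to the
# function above, assuming this module has been imported as `process`.
#
#   process.process_user_properties("/data/imports/2019_05_ride",
#                                   user_name="jane_mapper",
#                                   organization_key="o_key",  # hypothetical key
#                                   private=True,
#                                   verbose=True)
#
# On success this writes a "user_process" success log entry for every image
# under the import path; a failed sanity check logs "user_process" as failed
# and returns without processing.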