def upload(import_path, manual_done=False, verbose=False, skip_subfolders=False):
    '''
    Upload local images to Mapillary.

    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        manual_done: Prompt user to confirm upload finalization.

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.

    NOTE(review): this module defines `upload` more than once; a later
    definition shadows this one at import time.
    '''
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        # FIX: message previously read "doesnt not exist"; also exit with a
        # non-zero status so callers/scripts can detect the failure.
        print("Error, import directory " + import_path + " does not exist, exiting...")
        sys.exit(1)

    # get list of files to process: everything found, plus the subsets whose
    # local log flags mark them as pending, failed, or already uploaded
    total_file_list = uploader.get_total_file_list(
        import_path, skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(
        import_path, skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        # nothing left to do; not an error
        print("All images have already been uploaded")
        sys.exit()

    if len(failed_file_list):
        upload_failed = raw_input(
            "Retry uploading previously failed image uploads? [y/n]: ")
        # if yes, add images to the upload list
        if upload_failed in ["y", "Y", "yes", "Yes"]:
            upload_file_list.extend(failed_file_list)

    # verify the images in the upload list, they need to have the image
    # description and certain MAP properties
    upload_file_list = [f for f in upload_file_list if verify_mapillary_tag(f)]

    if not len(upload_file_list):
        print("No images to upload.")
        print('Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them')
        # FIX: exit non-zero -- this is a failure, not a clean completion
        sys.exit(1)

    # get upload params saved during processing, keyed by image path
    params = {}
    for image in total_file_list:
        log_root = uploader.log_rootpath(import_path, image)
        upload_params_path = os.path.join(
            log_root, "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            with open(upload_params_path, "rb") as jf:
                params[image] = json.load(
                    jf, object_hook=uploader.ascii_encode_dict)

    # inform how many images are to be uploaded and how many are being skipped
    # from upload
    print("Uploading {} images with valid mapillary tags (Skipping {})".format(
        len(upload_file_list), len(total_file_list) - len(upload_file_list)))

    # call the actual upload, passing the list of images, the root of the
    # import and the upload params
    uploader.upload_file_list(upload_file_list, import_path, params)

    # finalize manual uploads if necessary
    finalize_file_list = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    # if manual uploads a DONE file needs to be uploaded to let the harvester
    # know the sequence is done uploading
    if len(finalize_file_list):
        finalize_all = 1
        if manual_done:
            finalize_all = uploader.prompt_to_finalize("uploads")
        if finalize_all:
            # get the s3 locations of the sequences
            finalize_params = uploader.process_upload_finalization(
                finalize_file_list, params)
            uploader.finalize_upload(finalize_params)
            # flag finalization for each file
            uploader.flag_finalization(import_path, finalize_file_list)
        else:
            # user declined finalization: deliberate stop, exit cleanly
            print("Uploads will not be finalized.")
            print("If you wish to finalize your uploads, run the upload tool again.")
            sys.exit()

    uploader.print_summary(upload_file_list)
def upload(import_path, manual_done=False, verbose=False, skip_subfolders=False, video_file=None, number_threads=None, max_attempts=None):
    '''
    Upload local images to Mapillary.

    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        manual_done: Prompt user to confirm upload finalization.
        video_file: Optional path to a source video (file or directory);
            when given, the effective import path is redirected to the
            sampled-video-frames directory.
        number_threads: Forwarded to the uploader (uploader's default
            applies when None).
        max_attempts: Forwarded to the uploader (uploader's default
            applies when None).

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.

    NOTE(review): this module defines `upload` more than once; a later
    definition shadows this one at import time.
    '''
    # sanity check if video file is passed
    if video_file and not (os.path.isdir(video_file) or os.path.isfile(video_file)):
        print("Error, video path " + video_file + " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_file:
        # set sampling path
        video_sampling_path = processing.sampled_video_frames_rootpath(
            video_file)
        # prefer an explicit import path; otherwise put the sampled frames
        # next to the video file
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
            os.path.dirname(video_file), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path + " does not exist, exiting...")
        sys.exit(1)

    # get list of files to process: everything found, plus the subsets whose
    # local log flags mark them as pending, failed, or already uploaded
    total_file_list = uploader.get_total_file_list(import_path, skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(import_path, skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
        sys.exit()

    if len(failed_file_list):
        upload_failed = raw_input(
            "Retry uploading previously failed image uploads? [y/n]: ")
        # if yes, add images to the upload list
        if upload_failed in ["y", "Y", "yes", "Yes"]:
            upload_file_list.extend(failed_file_list)

    # verify the images in the upload list, they need to have the image
    # description and certain MAP properties
    upload_file_list = [f for f in upload_file_list if verify_mapillary_tag(f)]

    if not len(upload_file_list):
        print("No images to upload.")
        print(
            'Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them'
        )
        sys.exit(1)

    # get upload params saved during processing, keyed by image path
    params = {}
    for image in total_file_list:
        log_root = uploader.log_rootpath(image)
        upload_params_path = os.path.join(log_root,
                                          "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            with open(upload_params_path, "rb") as jf:
                params[image] = json.load(
                    jf, object_hook=uploader.ascii_encode_dict)

    # inform how many images are to be uploaded and how many are being skipped
    # from upload
    print("Uploading {} images with valid mapillary tags (Skipping {})".format(
        len(upload_file_list), len(total_file_list) - len(upload_file_list)))

    # call the actual upload, passing the list of images and the upload params
    uploader.upload_file_list(upload_file_list, params, number_threads,
                              max_attempts)

    # finalize manual uploads if necessary
    finalize_file_list = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    # if manual uploads a DONE file needs to be uploaded to let the harvester
    # know the sequence is done uploading
    if len(finalize_file_list):
        finalize_all = 1
        if manual_done:
            finalize_all = uploader.prompt_to_finalize("uploads")
        if finalize_all:
            # get the s3 locations of the sequences
            finalize_params = uploader.process_upload_finalization(
                finalize_file_list, params)
            uploader.finalize_upload(finalize_params)
            # flag finalization for each file
            uploader.flag_finalization(finalize_file_list)
        else:
            print("Uploads will not be finalized.")
            print(
                "If you wish to finalize your uploads, run the upload tool again."
            )
            sys.exit()

    uploader.print_summary(upload_file_list)
def upload(import_path, verbose=False, skip_subfolders=False, number_threads=None, max_attempts=None, video_import_path=None, dry_run=False, api_version=1.0):
    '''
    Upload local images to Mapillary.

    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        number_threads: Forwarded to the uploader (uploader default if None).
        max_attempts: Forwarded to the uploader (uploader default if None).
        video_import_path: Optional source video path (file or directory);
            when given, the effective import path is redirected to the
            sampled-video-frames directory.
        dry_run: Accepted for interface compatibility; not used in this body.
        api_version: Upload API version; only 1.0 is implemented here.

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.
    '''
    # sanity check if video file is passed
    if video_import_path and (not os.path.isdir(video_import_path) and not os.path.isfile(video_import_path)):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of files to process, partitioned by local log status
    total_file_list = uploader.get_total_file_list(import_path, skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(import_path, skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)
    to_finalize_file_list = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
    else:
        if len(failed_file_list):
            # skip the interactive prompt and auto-retry when driven over IPC
            upload_failed = raw_input(
                "Retry uploading previously failed image uploads? [y/n]: ") if not ipc.is_enabled() else 'y'
            # if yes, add images to the upload list
            if upload_failed in ["y", "Y", "yes", "Yes"]:
                upload_file_list.extend(failed_file_list)

        # verify the images in the upload list, they need to have the image
        # description and certain MAP properties
        upload_file_list = [
            f for f in upload_file_list if verify_mapillary_tag(f)
        ]

        if not len(upload_file_list) and not len(to_finalize_file_list):
            print("No images to upload.")
            print(
                'Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them'
            )
            sys.exit(1)

        if len(upload_file_list):
            # get upload params for the manual upload images, group them per
            # sequence and separate direct upload images
            params = {}
            list_per_sequence_mapping = {}
            direct_upload_file_list = []
            for image in upload_file_list:
                log_root = uploader.log_rootpath(image)
                upload_params_path = os.path.join(
                    log_root, "upload_params_process.json")
                if os.path.isfile(upload_params_path):
                    with open(upload_params_path, "rb") as jf:
                        params[image] = json.load(
                            jf, object_hook=uploader.ascii_encode_dict)
                    sequence = params[image]["key"]
                    if sequence in list_per_sequence_mapping:
                        list_per_sequence_mapping[sequence].append(image)
                    else:
                        list_per_sequence_mapping[sequence] = [image]
                else:
                    direct_upload_file_list.append(image)

            # inform how many images are to be uploaded and how many are being
            # skipped from upload
            print(
                "Uploading {} images with valid mapillary tags (Skipping {})".
                format(len(upload_file_list),
                       len(total_file_list) - len(upload_file_list)))

            if api_version == 2.0:
                # FIX: this previously read `uploder.uploadfile_list` -- an
                # undefined name (NameError at runtime) and a bare attribute
                # access rather than a call. Fail loudly and clearly until a
                # v2 upload path actually exists.
                raise NotImplementedError(
                    "API version 2.0 upload is not implemented")

            if len(direct_upload_file_list):
                uploader.upload_file_list_direct(direct_upload_file_list,
                                                 number_threads, max_attempts)
            for idx, sequence in enumerate(list_per_sequence_mapping):
                uploader.upload_file_list_manual(
                    list_per_sequence_mapping[sequence], params, idx,
                    number_threads, max_attempts)

        if len(to_finalize_file_list):
            # upload one DONE marker per sequence for manually uploaded images
            params = {}
            sequences = []
            for image in to_finalize_file_list:
                log_root = uploader.log_rootpath(image)
                upload_params_path = os.path.join(
                    log_root, "upload_params_process.json")
                if os.path.isfile(upload_params_path):
                    with open(upload_params_path, "rb") as jf:
                        image_params = json.load(
                            jf, object_hook=uploader.ascii_encode_dict)
                    sequence = image_params["key"]
                    # keep only the first image seen per sequence
                    if sequence not in sequences:
                        params[image] = image_params
                        sequences.append(sequence)
            for image in params:
                uploader.upload_done_file(**params[image])
            uploader.flag_finalization(to_finalize_file_list)

    uploader.print_summary(upload_file_list)
def post_process(import_path, split_import_path=None, video_import_path=None, summarize=False, move_images=False, move_duplicates=False, move_uploaded=False, save_as_json=False, list_file_status=False, push_images=False, skip_subfolders=False, verbose=False, save_local_mapping=False):
    '''
    Post-process an import: summarize logs, list file status, push finalized
    uploads, and/or physically split images by import status.

    Args:
        import_path: Directory path to where the images are stored.
        split_import_path: Destination root for moved images; defaults to
            "<import_path>/mapillary_import_split_images" when absent/invalid.
        video_import_path: Optional video source path; redirects import_path
            to the sampled-video-frames directory.
        summarize: Print (and optionally save) an import summary.
        move_images: Move both duplicates and uploaded images.
        move_duplicates: Move images flagged as duplicates.
        move_uploaded: Move successfully uploaded images.
        save_as_json: Save summary/status output as JSON files.
        list_file_status: Print (and optionally save) per-status file lists.
        push_images: Finalize (push) successfully uploaded manual-upload images.
        skip_subfolders: Skip images stored in subdirectories.
        verbose: Accepted for interface compatibility; not used in this body.
        save_local_mapping: Write a local mapping CSV (see note below).
    '''
    # return if nothing specified
    if not summarize and not move_images and not list_file_status and not push_images and not move_duplicates and not move_uploaded and not save_local_mapping:
        print("No post processing action specified.")
        return

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        import_path = os.path.join(os.path.abspath(import_path),
                                   video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_import_path), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    if save_local_mapping:
        # NOTE(review): `save_local_mapping` is this function's boolean
        # parameter, so calling it raises TypeError, and
        # `local_mapping_filepath` is not defined anywhere in view --
        # this branch cannot work as written. Presumably a module-level
        # helper of the same name was intended; confirm before relying on it.
        local_mapping = save_local_mapping(import_path)
        with open(local_mapping_filepath, "w") as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            for row in local_mapping:
                csvwriter.writerow(row)
    else:
        print("Reading import logs for import path {}...".format(import_path))

        # collect logs
        summary_dict = {}
        status_list_dict = {}

        total_files = uploader.get_total_file_list(import_path)
        total_files_count = len(total_files)

        # upload logs
        uploaded_files = uploader.get_success_upload_file_list(
            import_path, skip_subfolders)
        uploaded_files_count = len(uploaded_files)
        failed_upload_files = uploader.get_failed_upload_file_list(
            import_path, skip_subfolders)
        failed_upload_files_count = len(failed_upload_files)
        to_be_finalized_files = uploader.get_finalize_file_list(import_path)
        to_be_finalized_files_count = len(to_be_finalized_files)

        summary_dict["total images"] = total_files_count
        summary_dict["upload summary"] = {
            "successfully uploaded": uploaded_files_count,
            "failed uploads": failed_upload_files_count,
            "uploaded to be finalized": to_be_finalized_files_count
        }

        status_list_dict["successfully uploaded"] = uploaded_files
        status_list_dict["failed uploads"] = failed_upload_files
        status_list_dict["uploaded to be finalized"] = to_be_finalized_files

        # process logs
        summary_dict["process summary"] = {}
        process_steps = ["user_process", "import_meta_process", "geotag_process",
                         "sequence_process", "upload_params_process", "mapillary_image_description"]
        process_status = ["success", "failed"]
        for step in process_steps:
            process_success = len(processing.get_process_status_file_list(
                import_path, step, "success", skip_subfolders))
            process_failed = len(processing.get_process_status_file_list(
                import_path, step, "failed", skip_subfolders))
            summary_dict["process summary"][step] = {
                "failed": process_failed,
                "success": process_success
            }

        duplicates_file_list = processing.get_duplicate_file_list(
            import_path, skip_subfolders)
        duplicates_file_list_count = len(duplicates_file_list)
        summary_dict["process summary"]["duplicates"] = duplicates_file_list_count
        status_list_dict["duplicates"] = duplicates_file_list

        # processed for upload
        to_be_uploaded_files = uploader.get_upload_file_list(
            import_path, skip_subfolders)
        to_be_uploaded_files_count = len(to_be_uploaded_files)
        summary_dict["process summary"]["processed_not_yet_uploaded"] = to_be_uploaded_files_count
        status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files

        # summary
        if summarize:
            print("")
            print("Import summary for import path {} :".format(import_path))
            print(json.dumps(summary_dict, indent=4))
            ipc.send('summary', summary_dict)
            if save_as_json:
                try:
                    processing.save_json(summary_dict, os.path.join(
                        import_path, "mapillary_import_summary.json"))
                except Exception as e:
                    print("Could not save summary into json at {}, due to {}".format(
                        os.path.join(import_path, "mapillary_import_summary.json"), e))

        # list file status
        if list_file_status:
            print("")
            print("List of file status for import path {} :".format(import_path))
            print(json.dumps(status_list_dict, indent=4))
            if save_as_json:
                try:
                    processing.save_json(status_list_dict, os.path.join(
                        import_path, "mapillary_import_image_status_list.json"))
                except Exception as e:
                    print("Could not save image status list into json at {}, due to {}".format(
                        os.path.join(import_path, "mapillary_import_image_status_list.json"), e))

        # push images that were uploaded successfully
        # collect upload params
        if push_images:
            to_be_pushed_files = uploader.get_success_only_manual_upload_file_list(
                import_path, skip_subfolders)
            params = {}
            for image in tqdm(to_be_pushed_files, desc="Pushing images"):
                log_root = uploader.log_rootpath(image)
                upload_params_path = os.path.join(
                    log_root, "upload_params_process.json")
                if os.path.isfile(upload_params_path):
                    with open(upload_params_path, "rb") as jf:
                        params[image] = json.load(
                            jf, object_hook=uploader.ascii_encode_dict)

            # get the s3 locations of the sequences
            finalize_params = uploader.process_upload_finalization(
                to_be_pushed_files, params)
            uploader.finalize_upload(finalize_params)
            # flag finalization for each file
            uploader.flag_finalization(to_be_pushed_files)

        if move_images or move_duplicates or move_uploaded:
            print("")
            print("Note that images will be moved along with their mapillary logs in order to preserve the import status")
            # FIX: renamed local "defualt_split_import_path" -> default_...
            default_split_import_path = os.path.join(
                import_path, "mapillary_import_split_images")
            if not split_import_path:
                final_split_path = default_split_import_path
                print("")
                print(
                    "Split import path not provided and will therefore be set to default path {}".format(default_split_import_path))
            if split_import_path:
                # FIX: was os.path.isfile(...) -- the split path is a
                # directory, so even an existing directory was rejected
                if not os.path.isdir(split_import_path):
                    final_split_path = default_split_import_path
                    print("Split import path does not exist, split import path will be set to default path {}".format(
                        default_split_import_path))
                else:
                    final_split_path = split_import_path
            print("")
            print("Splitting import path {} into {} based on image import status...".format(
                import_path, final_split_path))
            if move_images:
                move_duplicates = True
                move_uploaded = True
            # move failed uploads
            if not len(failed_upload_files):
                print("")
                print(
                    "There are no failed upload images in the specified import path.")
            else:
                failed_upload_path = os.path.join(
                    final_split_path, "upload_failed")
                if not os.path.isdir(failed_upload_path):
                    os.makedirs(failed_upload_path)
                for failed in failed_upload_files:
                    failed_upload_image_path = os.path.join(
                        failed_upload_path, os.path.basename(failed))
                    # FIX: was os.rename(failed, failed_upload_path) -- every
                    # image was renamed onto the directory path itself instead
                    # of its per-image destination (the duplicates branch
                    # below does this correctly)
                    os.rename(failed, failed_upload_image_path)
                    failed_upload_log_path = os.path.dirname(uploader.log_rootpath(
                        failed_upload_image_path))
                    if not os.path.isdir(failed_upload_log_path):
                        os.makedirs(failed_upload_log_path)
                    shutil.move(uploader.log_rootpath(failed),
                                failed_upload_log_path)
                print("")
                print("Done moving failed upload images to {}".format(
                    failed_upload_path))
            if move_duplicates:
                if not len(duplicates_file_list):
                    print("")
                    print("There were no duplicates flagged in the specified import path. If you are processing the images with mapillary_tools and would like to flag duplicates, you must specify --advanced --flag_duplicates")
                else:
                    duplicate_path = os.path.join(
                        final_split_path, "duplicates")
                    if not os.path.isdir(duplicate_path):
                        os.makedirs(duplicate_path)
                    for duplicate in duplicates_file_list:
                        duplicate_image_path = os.path.join(
                            duplicate_path, os.path.basename(duplicate))
                        os.rename(duplicate, duplicate_image_path)
                        duplicate_log_path = os.path.dirname(uploader.log_rootpath(
                            duplicate_image_path))
                        if not os.path.isdir(duplicate_log_path):
                            os.makedirs(duplicate_log_path)
                        shutil.move(uploader.log_rootpath(duplicate),
                                    duplicate_log_path)
                    print("")
                    print("Done moving duplicate images to {}".format(
                        duplicate_path))
            if move_uploaded:
                if not len(uploaded_files):
                    print("")
                    # FIX: message typo "successfuly"
                    print(
                        "There are no successfully uploaded images in the specified import path.")
                else:
                    upload_success_path = os.path.join(
                        final_split_path, "upload_success")
                    if not os.path.isdir(upload_success_path):
                        os.makedirs(upload_success_path)
                    for uploaded in uploaded_files:
                        uploaded_image_path = os.path.join(
                            upload_success_path, os.path.basename(uploaded))
                        # FIX: was os.rename(uploaded, upload_success_path) --
                        # same directory-path bug as the failed-upload branch
                        os.rename(uploaded, uploaded_image_path)
                        uploaded_log_path = os.path.dirname(uploader.log_rootpath(
                            uploaded_image_path))
                        if not os.path.isdir(uploaded_log_path):
                            os.makedirs(uploaded_log_path)
                        shutil.move(uploader.log_rootpath(uploaded),
                                    uploaded_log_path)
                    print("")
                    print("Done moving successfully uploaded images to {}".format(
                        upload_success_path))
def post_process(import_path, split_import_path=None, video_import_path=None, summarize=False, move_all_images=False, move_duplicates=False, move_uploaded=False, move_sequences=False, save_as_json=False, list_file_status=False, push_images=False, skip_subfolders=False, verbose=False, save_local_mapping=False):
    '''
    Post-process an import: summarize logs, list file status, push finalized
    uploads, and/or move images into per-status (and per-sequence) folders.

    Args:
        import_path: Directory path to where the images are stored.
        split_import_path: Root directory for moved images; defaults to
            import_path when not provided.
        video_import_path: Optional video source path (file or directory);
            redirects import_path to the sampled-video-frames directory.
        summarize: Print (and optionally save) an import summary.
        move_all_images: Shorthand enabling move_sequences, move_duplicates
            and move_uploaded.
        move_duplicates: Move images flagged as duplicates.
        move_uploaded: Move images by upload status.
        move_sequences: Group moved images by sequence.
        save_as_json: Save summary/status output as JSON files.
        list_file_status: Print (and optionally save) per-status file lists.
        push_images: Finalize (push) successfully uploaded manual-upload images.
        skip_subfolders: Skip images stored in subdirectories.
        verbose: Accepted for interface compatibility; not used in this body.
        save_local_mapping: Write a local mapping CSV (see note below).

    NOTE(review): this module defines `post_process` more than once; a later
    definition shadows this one at import time.
    '''
    # return if nothing specified
    if not any([summarize, move_all_images, list_file_status, push_images, move_duplicates, move_uploaded, save_local_mapping, move_sequences]):
        print("No post processing action specified.")
        return

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # move_all_images implies every move flag
    if move_all_images:
        move_sequences = True
        move_duplicates = True
        move_uploaded = True

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(os.path.abspath(import_path),
                                   video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    if save_local_mapping:
        # NOTE(review): `save_local_mapping` is this function's boolean
        # parameter, so calling it raises TypeError, and
        # `local_mapping_filepath` is not defined anywhere in view --
        # this branch cannot work as written; presumably a module-level
        # helper of the same name was intended. Confirm before relying on it.
        local_mapping = save_local_mapping(import_path)
        with open(local_mapping_filepath, "w") as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            for row in local_mapping:
                csvwriter.writerow(row)

    if push_images:
        to_be_pushed_files = uploader.get_success_only_manual_upload_file_list(
            import_path, skip_subfolders)
        params = {}
        for image in tqdm(to_be_pushed_files, desc="Pushing images"):
            log_root = uploader.log_rootpath(image)
            upload_params_path = os.path.join(
                log_root, "upload_params_process.json")
            if os.path.isfile(upload_params_path):
                with open(upload_params_path, "rb") as jf:
                    params[image] = json.load(
                        jf, object_hook=uploader.ascii_encode_dict)

        # get the s3 locations of the sequences
        finalize_params = uploader.process_upload_finalization(
            to_be_pushed_files, params)
        uploader.finalize_upload(finalize_params)
        # flag finalization for each file
        uploader.flag_finalization(to_be_pushed_files)

    # collect only the log lists the requested actions actually need
    if any([summarize, list_file_status, move_uploaded]):
        # upload logs
        uploaded_files = uploader.get_success_upload_file_list(
            import_path, skip_subfolders)
        uploaded_files_count = len(uploaded_files)
        failed_upload_files = uploader.get_failed_upload_file_list(
            import_path, skip_subfolders)
        failed_upload_files_count = len(failed_upload_files)
        to_be_finalized_files = uploader.get_finalize_file_list(import_path)
        to_be_finalized_files_count = len(to_be_finalized_files)
        to_be_uploaded_files = uploader.get_upload_file_list(
            import_path, skip_subfolders)
        to_be_uploaded_files_count = len(to_be_uploaded_files)
    if any([summarize, move_sequences]):
        total_files = uploader.get_total_file_list(import_path)
        total_files_count = len(total_files)
    if any([summarize, move_duplicates, list_file_status]):
        duplicates_file_list = processing.get_duplicate_file_list(
            import_path, skip_subfolders)
        duplicates_file_list_count = len(duplicates_file_list)

    if summarize:
        summary_dict = {}
        summary_dict["total images"] = total_files_count
        summary_dict["upload summary"] = {
            "successfully uploaded": uploaded_files_count,
            "failed uploads": failed_upload_files_count,
            "uploaded to be finalized": to_be_finalized_files_count
        }
        # process logs
        summary_dict["process summary"] = {}
        process_steps = ["user_process", "import_meta_process", "geotag_process",
                         "sequence_process", "upload_params_process", "mapillary_image_description"]
        process_status = ["success", "failed"]
        for step in process_steps:
            process_success = len(processing.get_process_status_file_list(
                import_path, step, "success", skip_subfolders))
            process_failed = len(processing.get_process_status_file_list(
                import_path, step, "failed", skip_subfolders))
            summary_dict["process summary"][step] = {
                "failed": process_failed,
                "success": process_success
            }
        summary_dict["process summary"]["duplicates"] = duplicates_file_list_count
        summary_dict["process summary"]["processed_not_yet_uploaded"] = to_be_uploaded_files_count

        print("Import summary for import path {} :".format(import_path))
        print(json.dumps(summary_dict, indent=4))
        ipc.send('summary', summary_dict)

        if save_as_json:
            try:
                processing.save_json(summary_dict, os.path.join(
                    import_path, "mapillary_import_summary.json"))
            except Exception as e:
                print("Could not save summary into json at {}, due to {}".format(
                    os.path.join(import_path, "mapillary_import_summary.json"), e))

    if list_file_status:
        status_list_dict = {}
        status_list_dict["successfully uploaded"] = uploaded_files
        status_list_dict["failed uploads"] = failed_upload_files
        status_list_dict["uploaded to be finalized"] = to_be_finalized_files
        status_list_dict["duplicates"] = duplicates_file_list
        status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files

        print("")
        print("List of file status for import path {} :".format(import_path))
        print(json.dumps(status_list_dict, indent=4))

        if save_as_json:
            try:
                processing.save_json(status_list_dict, os.path.join(
                    import_path, "mapillary_import_image_status_list.json"))
            except Exception as e:
                print("Could not save image status list into json at {}, due to {}".format(
                    os.path.join(import_path, "mapillary_import_image_status_list.json"), e))

    # default the split root to the import path itself
    split_import_path = split_import_path if split_import_path else import_path
    if any([move_sequences, move_duplicates, move_uploaded]):
        if not os.path.isdir(split_import_path):
            print("Split import path {} does not exist.".format(
                split_import_path))
            sys.exit(1)

    # build image -> destination-subfolder mapping; "basic" holds status
    # folder names, "sequence" an optional per-sequence subfolder
    destination_mapping = {}
    if move_duplicates:
        for image in duplicates_file_list:
            destination_mapping[image] = {"basic": ["duplicates"]}
    if move_uploaded:
        for image in uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("uploaded")
            else:
                destination_mapping[image] = {"basic": ["uploaded"]}
        for image in failed_upload_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("failed_upload")
            else:
                destination_mapping[image] = {"basic": ["failed_upload"]}
        for image in to_be_finalized_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append(
                    "uploaded_not_finalized")
            else:
                destination_mapping[image] = {
                    "basic": ["uploaded_not_finalized"]}
        for image in to_be_uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("to_be_uploaded")
            else:
                destination_mapping[image] = {"basic": ["to_be_uploaded"]}
    if move_sequences:
        destination_mapping = map_images_to_sequences(
            destination_mapping, total_files)

    # perform the moves, carrying each image's log directory along with it
    for image in destination_mapping:
        basic_destination = destination_mapping[image]["basic"] if "basic" in destination_mapping[image] else [
        ]
        sequence_destination = destination_mapping[image][
            "sequence"] if "sequence" in destination_mapping[image] else ""
        # destination preserves the image's path relative to import_path
        image_destination_path = os.path.join(*([split_import_path] + basic_destination + [
            os.path.dirname(image[len(os.path.abspath(import_path)) + 1:])] + [sequence_destination, os.path.basename(image)]))
        if not os.path.isdir(os.path.dirname(image_destination_path)):
            os.makedirs(os.path.dirname(image_destination_path))
        os.rename(image, image_destination_path)
        image_logs_dir = uploader.log_rootpath(image)
        destination_logs_dir = uploader.log_rootpath(image_destination_path)
        # images without a log directory are moved without one
        if not os.path.isdir(image_logs_dir):
            continue
        if not os.path.isdir(os.path.dirname(destination_logs_dir)):
            os.makedirs(os.path.dirname(destination_logs_dir))
        os.rename(image_logs_dir, destination_logs_dir)
def post_process(import_path, split_import_path=None, video_import_path=None, summarize=False, move_all_images=False, move_duplicates=False, move_uploaded=False, move_sequences=False, save_as_json=False, list_file_status=False, push_images=False, skip_subfolders=False, verbose=False, save_local_mapping=False):
    '''
    Post-process an import: summarize logs, list file status, push finalized
    uploads, and/or move images into per-status (and per-sequence) folders.

    Args:
        import_path: Directory path to where the images are stored.
        split_import_path: Root directory for moved images; defaults to
            import_path when not provided.
        video_import_path: Optional video source path (file or directory);
            redirects import_path to the sampled-video-frames directory.
        summarize: Print (and optionally save) an import summary.
        move_all_images: Shorthand enabling move_sequences, move_duplicates
            and move_uploaded.
        move_duplicates: Move images flagged as duplicates.
        move_uploaded: Move images by upload status.
        move_sequences: Group moved images by sequence.
        save_as_json: Save summary/status output as JSON files.
        list_file_status: Print (and optionally save) per-status file lists.
        push_images: Finalize (push) successfully uploaded manual-upload images.
        skip_subfolders: Skip images stored in subdirectories.
        verbose: Accepted for interface compatibility; not used in this body.
        save_local_mapping: Write a local mapping CSV (see note below).

    NOTE(review): the module defines `post_process` more than once with the
    same token content; whichever definition appears last wins at import time.
    '''
    # return if nothing specified
    if not any([
            summarize, move_all_images, list_file_status, push_images,
            move_duplicates, move_uploaded, save_local_mapping, move_sequences
    ]):
        print("No post processing action specified.")
        return

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(
            video_import_path) and not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # move_all_images implies every move flag
    if move_all_images:
        move_sequences = True
        move_duplicates = True
        move_uploaded = True

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    if save_local_mapping:
        # NOTE(review): `save_local_mapping` is this function's boolean
        # parameter, so calling it raises TypeError, and
        # `local_mapping_filepath` is not defined anywhere in view --
        # this branch cannot work as written; presumably a module-level
        # helper of the same name was intended. Confirm before relying on it.
        local_mapping = save_local_mapping(import_path)
        with open(local_mapping_filepath, "w") as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            for row in local_mapping:
                csvwriter.writerow(row)

    if push_images:
        to_be_pushed_files = uploader.get_success_only_manual_upload_file_list(
            import_path, skip_subfolders)
        params = {}
        for image in tqdm(to_be_pushed_files, desc="Pushing images"):
            log_root = uploader.log_rootpath(image)
            upload_params_path = os.path.join(log_root,
                                              "upload_params_process.json")
            if os.path.isfile(upload_params_path):
                with open(upload_params_path, "rb") as jf:
                    params[image] = json.load(
                        jf, object_hook=uploader.ascii_encode_dict)

        # get the s3 locations of the sequences
        finalize_params = uploader.process_upload_finalization(
            to_be_pushed_files, params)
        uploader.finalize_upload(finalize_params)
        # flag finalization for each file
        uploader.flag_finalization(to_be_pushed_files)

    # collect only the log lists the requested actions actually need
    if any([summarize, list_file_status, move_uploaded]):
        # upload logs
        uploaded_files = uploader.get_success_upload_file_list(
            import_path, skip_subfolders)
        uploaded_files_count = len(uploaded_files)
        failed_upload_files = uploader.get_failed_upload_file_list(
            import_path, skip_subfolders)
        failed_upload_files_count = len(failed_upload_files)
        to_be_finalized_files = uploader.get_finalize_file_list(import_path)
        to_be_finalized_files_count = len(to_be_finalized_files)
        to_be_uploaded_files = uploader.get_upload_file_list(
            import_path, skip_subfolders)
        to_be_uploaded_files_count = len(to_be_uploaded_files)
    if any([summarize, move_sequences]):
        total_files = uploader.get_total_file_list(import_path)
        total_files_count = len(total_files)
    if any([summarize, move_duplicates, list_file_status]):
        duplicates_file_list = processing.get_duplicate_file_list(
            import_path, skip_subfolders)
        duplicates_file_list_count = len(duplicates_file_list)

    if summarize:
        summary_dict = {}
        summary_dict["total images"] = total_files_count
        summary_dict["upload summary"] = {
            "successfully uploaded": uploaded_files_count,
            "failed uploads": failed_upload_files_count,
            "uploaded to be finalized": to_be_finalized_files_count
        }
        # process logs
        summary_dict["process summary"] = {}
        process_steps = [
            "user_process", "import_meta_process", "geotag_process",
            "sequence_process", "upload_params_process",
            "mapillary_image_description"
        ]
        process_status = ["success", "failed"]
        for step in process_steps:
            process_success = len(
                processing.get_process_status_file_list(
                    import_path, step, "success", skip_subfolders))
            process_failed = len(
                processing.get_process_status_file_list(
                    import_path, step, "failed", skip_subfolders))
            summary_dict["process summary"][step] = {
                "failed": process_failed,
                "success": process_success
            }
        summary_dict["process summary"][
            "duplicates"] = duplicates_file_list_count
        summary_dict["process summary"][
            "processed_not_yet_uploaded"] = to_be_uploaded_files_count

        print("Import summary for import path {} :".format(import_path))
        print(json.dumps(summary_dict, indent=4))
        ipc.send('summary', summary_dict)

        if save_as_json:
            try:
                processing.save_json(
                    summary_dict,
                    os.path.join(import_path, "mapillary_import_summary.json"))
            except Exception as e:
                print(
                    "Could not save summary into json at {}, due to {}".format(
                        os.path.join(import_path,
                                     "mapillary_import_summary.json"), e))

    if list_file_status:
        status_list_dict = {}
        status_list_dict["successfully uploaded"] = uploaded_files
        status_list_dict["failed uploads"] = failed_upload_files
        status_list_dict["uploaded to be finalized"] = to_be_finalized_files
        status_list_dict["duplicates"] = duplicates_file_list
        status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files

        print("")
        print("List of file status for import path {} :".format(import_path))
        print(json.dumps(status_list_dict, indent=4))

        if save_as_json:
            try:
                processing.save_json(
                    status_list_dict,
                    os.path.join(import_path,
                                 "mapillary_import_image_status_list.json"))
            except Exception as e:
                print(
                    "Could not save image status list into json at {}, due to {}"
                    .format(
                        os.path.join(
                            import_path,
                            "mapillary_import_image_status_list.json"), e))

    # default the split root to the import path itself
    split_import_path = split_import_path if split_import_path else import_path
    if any([move_sequences, move_duplicates, move_uploaded]):
        if not os.path.isdir(split_import_path):
            print("Split import path {} does not exist.".format(
                split_import_path))
            sys.exit(1)

    # build image -> destination-subfolder mapping; "basic" holds status
    # folder names, "sequence" an optional per-sequence subfolder
    destination_mapping = {}
    if move_duplicates:
        for image in duplicates_file_list:
            destination_mapping[image] = {"basic": ["duplicates"]}
    if move_uploaded:
        for image in uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("uploaded")
            else:
                destination_mapping[image] = {"basic": ["uploaded"]}
        for image in failed_upload_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("failed_upload")
            else:
                destination_mapping[image] = {"basic": ["failed_upload"]}
        for image in to_be_finalized_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append(
                    "uploaded_not_finalized")
            else:
                destination_mapping[image] = {
                    "basic": ["uploaded_not_finalized"]
                }
        for image in to_be_uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("to_be_uploaded")
            else:
                destination_mapping[image] = {"basic": ["to_be_uploaded"]}
    if move_sequences:
        destination_mapping = map_images_to_sequences(destination_mapping,
                                                      total_files)

    # perform the moves, carrying each image's log directory along with it
    for image in destination_mapping:
        basic_destination = destination_mapping[image][
            "basic"] if "basic" in destination_mapping[image] else []
        sequence_destination = destination_mapping[image][
            "sequence"] if "sequence" in destination_mapping[image] else ""
        # destination preserves the image's path relative to import_path
        image_destination_path = os.path.join(*(
            [split_import_path] + basic_destination +
            [os.path.dirname(image[len(os.path.abspath(import_path)) + 1:])] +
            [sequence_destination, os.path.basename(image)]))
        if not os.path.isdir(os.path.dirname(image_destination_path)):
            os.makedirs(os.path.dirname(image_destination_path))
        os.rename(image, image_destination_path)
        image_logs_dir = uploader.log_rootpath(image)
        destination_logs_dir = uploader.log_rootpath(image_destination_path)
        # images without a log directory are moved without one
        if not os.path.isdir(image_logs_dir):
            continue
        if not os.path.isdir(os.path.dirname(destination_logs_dir)):
            os.makedirs(os.path.dirname(destination_logs_dir))
        os.rename(image_logs_dir, destination_logs_dir)
def upload(import_path, verbose=False, skip_subfolders=False, number_threads=None, max_attempts=None, video_import_path=None, dry_run=False, api_version=1.0):
    ''' Upload local images to Mapillary

    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        number_threads: Number of parallel upload threads (passed through to
            the uploader; None means the uploader's default).
        max_attempts: Maximum upload retries per image (passed through to
            the uploader; None means the uploader's default).
        video_import_path: Video file or directory the frames were sampled
            from; when set, import_path is redirected to the sampled-frames
            subdirectory.
        dry_run: Accepted for CLI compatibility; not used in this function.
        api_version: Upload API version. Only 1.0 is implemented; 2.0 is
            rejected with a warning (see note below).

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.
    '''
    # sanity check if video file is passed
    if video_import_path and (not os.path.isdir(video_import_path) and not os.path.isfile(video_import_path)):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    total_file_list = uploader.get_total_file_list(
        import_path, skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(
        import_path, skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)
    to_finalize_file_list = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
    else:
        if len(failed_file_list):
            # no interactive console when driven over IPC; retry by default
            upload_failed = raw_input(
                "Retry uploading previously failed image uploads? [y/n]: ") if not ipc.is_enabled() else 'y'
            # if yes, add images to the upload list
            if upload_failed in ["y", "Y", "yes", "Yes"]:
                upload_file_list.extend(failed_file_list)

        # verify the images in the upload list, they need to have the image
        # description and certain MAP properties
        upload_file_list = [
            f for f in upload_file_list if verify_mapillary_tag(f)]

        if not len(upload_file_list) and not len(to_finalize_file_list):
            print("No images to upload.")
            print('Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them')
            sys.exit(1)

        if len(upload_file_list):
            # get upload params for the manual upload images, group them per sequence
            # and separate direct upload images
            params = {}
            list_per_sequence_mapping = {}
            direct_upload_file_list = []
            for image in upload_file_list:
                log_root = uploader.log_rootpath(image)
                upload_params_path = os.path.join(
                    log_root, "upload_params_process.json")
                if os.path.isfile(upload_params_path):
                    with open(upload_params_path, "rb") as jf:
                        params[image] = json.load(
                            jf, object_hook=uploader.ascii_encode_dict)
                        sequence = params[image]["key"]
                        if sequence in list_per_sequence_mapping:
                            list_per_sequence_mapping[sequence].append(image)
                        else:
                            list_per_sequence_mapping[sequence] = [image]
                else:
                    # no per-image upload params -> upload directly
                    direct_upload_file_list.append(image)

            # inform how many images are to be uploaded and how many are being skipped
            # from upload
            print("Uploading {} images with valid mapillary tags (Skipping {})".format(
                len(upload_file_list), len(total_file_list) - len(upload_file_list)))

            # NOTE(review): the original code had `if api_version==2.0:
            # uploder.uploadfile_list` -- a misspelled, undefined name and a
            # bare attribute access, which raised NameError at runtime. The
            # 2.0 upload path is not implemented here, so warn and fall back
            # to the default upload behavior instead of crashing.
            if api_version == 2.0:
                print("Warning: API version 2.0 upload is not implemented, "
                      "falling back to the default upload.")

            if len(direct_upload_file_list):
                uploader.upload_file_list_direct(
                    direct_upload_file_list, number_threads, max_attempts)
            for idx, sequence in enumerate(list_per_sequence_mapping):
                uploader.upload_file_list_manual(
                    list_per_sequence_mapping[sequence], params, idx, number_threads, max_attempts)

        if len(to_finalize_file_list):
            # one DONE notification per sequence; collect a representative
            # image's params for each distinct sequence key
            params = {}
            sequences = []
            for image in to_finalize_file_list:
                log_root = uploader.log_rootpath(image)
                upload_params_path = os.path.join(
                    log_root, "upload_params_process.json")
                if os.path.isfile(upload_params_path):
                    with open(upload_params_path, "rb") as jf:
                        image_params = json.load(
                            jf, object_hook=uploader.ascii_encode_dict)
                        sequence = image_params["key"]
                        if sequence not in sequences:
                            params[image] = image_params
                            sequences.append(sequence)
            for image in params:
                uploader.upload_done_file(**params[image])
            uploader.flag_finalization(to_finalize_file_list)

    uploader.print_summary(upload_file_list)