def insert_video_frame_timestamp(import_path, start_time, sample_interval,
                                 duration_ratio=1.0, verbose=False):
    """Insert a DateTimeOriginal EXIF tag into every sampled video frame.

    Frame capture times are derived from the frame file names via
    `timestamps_from_filename`, offset from `start_time` by
    `sample_interval` scaled by `duration_ratio`.

    :param import_path: directory containing the sampled video frames
    :param start_time: capture time of the first sampled frame
    :param sample_interval: seconds between consecutive sampled frames
    :param duration_ratio: scaling factor applied to the sample interval
    :param verbose: if True, report the empty directory and per-frame failures
    """
    # get list of files to process
    frame_list = uploader.get_total_file_list(import_path)
    if not frame_list:
        if verbose:
            print("No video frames were sampled.")
        return
    video_frame_timestamps = timestamps_from_filename(
        frame_list, start_time, sample_interval, duration_ratio)
    for image, timestamp in zip(frame_list, video_frame_timestamps):
        try:
            exif_edit = ExifEdit(image)
            exif_edit.add_date_time_original(timestamp)
            exif_edit.write()
        except Exception:
            # Best effort: skip frames whose EXIF cannot be edited. Narrowed
            # from a bare except so Ctrl-C / SystemExit are not swallowed.
            if verbose:
                print("Could not insert timestamp into video frame " +
                      os.path.basename(image)[:-4])
            continue
def insert_video_frame_timestamp(video_filename, video_sampling_path, start_time,
                                 sample_interval=2.0, duration_ratio=1.0,
                                 verbose=False):
    """Insert a DateTimeOriginal EXIF tag into every sampled video frame.

    Frame capture times are derived by `timestamps_from_filename` from the
    source video name and the frame file names, offset from `start_time` by
    `sample_interval` scaled by `duration_ratio`.

    :param video_filename: name of the source video the frames were sampled from
    :param video_sampling_path: directory containing the sampled frames
    :param start_time: capture time of the first sampled frame
    :param sample_interval: seconds between consecutive sampled frames
    :param duration_ratio: scaling factor applied to the sample interval
    :param verbose: unused here; kept for interface compatibility
    """
    # get list of files to process
    frame_list = uploader.get_total_file_list(video_sampling_path)
    if not frame_list:
        # WARNING LOG
        print("No video frames were sampled.")
        return
    video_frame_timestamps = timestamps_from_filename(
        video_filename, frame_list, start_time, sample_interval, duration_ratio)
    for image, timestamp in tqdm(zip(frame_list, video_frame_timestamps),
                                 desc="Inserting frame capture time"):
        try:
            exif_edit = ExifEdit(image)
            exif_edit.add_date_time_original(timestamp)
            exif_edit.write()
        except Exception:
            # ERROR LOG -- narrowed from a bare except so Ctrl-C still works
            print("Could not insert timestamp into video frame " +
                  os.path.basename(image)[:-4])
            continue
def interpolation(data,
                  file_in_path=None,
                  file_format="csv",
                  time_column=0,
                  delimiter=",",
                  time_utc=False,
                  time_format="%Y-%m-%dT%H:%M:%SZ",
                  header=False,
                  keep_original=False,
                  import_path=None,
                  max_time_delta=1,
                  verbose=False):
    """Interpolate missing GPS or de-duplicate identical timestamps.

    Operates either on an external csv log file (`file_in_path`) or on the
    EXIF of images in a directory (`import_path`).  `data` selects the mode:
    "missing_gps" or "identical_timestamps".  NOTE(review): this function
    calls sys.exit() on both success and failure paths, so it never returns
    to the caller in the file/identical-timestamps branches.

    :param data: "missing_gps" or "identical_timestamps"
    :param file_in_path: path to an external log file (csv only)
    :param file_format: only "csv" is supported
    :param time_column: index of the timestamp column in the csv data
    :param delimiter: csv delimiter
    :param time_utc: whether timestamps are given as UTC epoch values
    :param time_format: strftime/strptime format of the timestamps
    :param header: whether the csv file has a header row
    :param keep_original: if True, write results to a "_processed" copy
    :param import_path: directory of images to process
    :param max_time_delta: max time gap allowed when interpolating lat/lon
    :param verbose: print non-fatal per-image warnings
    """
    if not data:
        print("Error, you must specify the data for interpolation.")
        print('Choose between "missing_gps" or "identical_timestamps"')
        sys.exit(1)
    if not import_path and not file_in_path:
        print(
            "Error, you must specify a path to data, either path to directory with images or path to an external log file."
        )
        sys.exit(1)
    # --- external csv log file branch ---
    if file_in_path:
        if not os.path.isfile(file_in_path):
            print("Error, specified input file does not exist, exiting...")
            sys.exit(1)
        if file_format != "csv":
            print(
                "Only csv file format is supported at the moment, exiting...")
            sys.exit(1)
        csv_data = process_csv.read_csv(file_in_path,
                                        delimiter=delimiter,
                                        header=header)
        if data == "identical_timestamps":
            # parse, interpolate, and re-format the timestamp column in place
            timestamps = csv_data[time_column]
            timestamps_datetime = [
                process_csv.format_time(timestamp, time_utc, time_format)
                for timestamp in timestamps
            ]
            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps_datetime)
            csv_data[time_column] = format_datetime(timestamps_interpolated,
                                                    time_utc, time_format)
            # overwrite in place unless keep_original asks for a copy;
            # [:-4] assumes a 3-character extension plus dot -- TODO confirm
            file_out = file_in_path if not keep_original else file_in_path[:
                                                                           -4] + "_processed." + file_format
            # csv_data is column-oriented; zip(*csv_data) yields rows
            with open(file_out, "w") as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=delimiter)
                for row in zip(*csv_data):
                    csvwriter.writerow(row)
            sys.exit()
        elif data == "missing_gps":
            print(
                "Error, missing gps interpolation in an external log file not supported yet, exiting..."
            )
            sys.exit(1)
        else:
            print("Error unsupported data for interpolation, exiting...")
            sys.exit(1)
    # --- image directory branch ---
    if import_path:
        if not os.path.isdir(import_path):
            print("Error, specified import path does not exist, exiting...")
            sys.exit(1)
        # get list of files to process
        process_file_list = uploader.get_total_file_list(import_path)
        if not len(process_file_list):
            print("No images found in the import path " + import_path)
            sys.exit(1)
        if data == "missing_gps":
            # get geotags from images and a list of tuples with images missing geotags
            # and their timestamp
            geotags, missing_geotags = processing.get_images_geotags(
                process_file_list)
            if not len(missing_geotags):
                print("No images in directory {} missing geotags, exiting...".
                      format(import_path))
                sys.exit(1)
            if not len(geotags):
                print("No images in directory {} with geotags.".format(
                    import_path))
                sys.exit(1)
            sys.stdout.write(
                "Interpolating gps for {} images missing geotags.".format(
                    len(missing_geotags)))
            for image, timestamp in tqdm(missing_geotags,
                                         desc="Interpolating missing gps"):
                # interpolate position from neighbouring geotagged images
                try:
                    lat, lon, bearing, elevation = interpolate_lat_lon(
                        geotags, timestamp, max_time_delta)
                except Exception as e:
                    print(
                        "Error, {}, interpolation of latitude and longitude failed for image {}"
                        .format(e, image))
                    continue
                # insert into exif
                exif_edit = ExifEdit(image)
                if lat and lon:
                    exif_edit.add_lat_lon(lat, lon)
                else:
                    print("Error, lat and lon not interpolated for image {}.".
                          format(image))
                if bearing:
                    exif_edit.add_direction(bearing)
                else:
                    if verbose:
                        print(
                            "Warning, bearing not interpolated for image {}.".
                            format(image))
                if elevation:
                    exif_edit.add_altitude(elevation)
                else:
                    if verbose:
                        print(
                            "Warning, altitude not interpolated for image {}.".
                            format(image))
                # record that the gps was interpolated in the image history
                meta = {}
                add_meta_tag(meta, "booleans", "interpolated_gps", True)
                exif_edit.add_image_history(meta["MAPMetaTags"])
                # NOTE(review): the "_processed." suffix keeps the trailing
                # dot but drops the extension -- looks suspicious; confirm
                file_out = image if not keep_original else image[:-4] + "_processed."
                exif_edit.write(filename=file_out)
        elif data == "identical_timestamps":
            sys.stdout.write("Loading image timestamps.")
            # read timestamps from EXIF of every image
            timestamps = []
            for image in tqdm(process_file_list,
                              desc="Interpolating identical timestamps"):
                # load exif
                exif = ExifRead(image)
                timestamp = exif.extract_capture_time()
                if timestamp:
                    timestamps.append(timestamp)
                else:
                    print(
                        "Capture could not be extracted for image {}.".format(
                            image))
            # interpolate; spreads identical consecutive timestamps apart
            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps)
            print("")
            sys.stdout.write("Interpolating identical timestamps.")
            counter = 0
            # write back
            for image, timestamp in tqdm(
                    zip(process_file_list, timestamps_interpolated),
                    desc="Writing capture time in image EXIF"):
                # print progress, a dot per image and a newline every 100
                counter += 1
                sys.stdout.write('.')
                if (counter % 100) == 0:
                    print("")
                # load exif
                exif_edit = ExifEdit(image)
                exif_edit.add_date_time_original(timestamp)
                # write to exif
                file_out = image if not keep_original else image[:-4] + "_processed."
                exif_edit.write(filename=file_out)
            sys.exit()
        else:
            print("Error unsupported data for interpolation, exiting...")
            sys.exit(1)
    print("")
def process_csv(import_path,
                csv_path,
                filename_column=None,
                timestamp_column=None,
                latitude_column=None,
                longitude_column=None,
                heading_column=None,
                altitude_column=None,
                gps_week_column=None,
                time_format="%Y:%m:%d %H:%M:%S.%f",
                convert_gps_time=False,
                convert_utc_time=False,
                delimiter=",",
                header=False,
                meta_columns=None,
                meta_names=None,
                meta_types=None,
                verbose=False,
                keep_original=False):
    """Insert geotag and meta data from an external csv file into image EXIF.

    Rows are matched to images either via `filename_column` or, if not
    provided, in order of the image file names.  Column numbers are
    1-indexed on the command line and converted to 0-indexed here.

    :param import_path: directory of images to process
    :param csv_path: path to the csv file with geotag/meta data
    :param time_format: strptime format of the timestamp column
    :param convert_gps_time: timestamps are GPS week + seconds
    :param convert_utc_time: timestamps are UTC epoch values
    :param keep_original: write results under the processed-images root
        instead of overwriting the originals
    Exits the process with status 1 on invalid arguments.
    """
    # sanity checks; format() keeps this from crashing when import_path is None
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory {} does not exist, exiting...".format(
            import_path))
        sys.exit(1)
    if not csv_path or not os.path.isfile(csv_path):
        print(
            "Error, csv file not provided or does not exist. Please specify a valid path to a csv file."
        )
        sys.exit(1)
    # get list of files to process
    process_file_list = uploader.get_total_file_list(import_path)
    if not process_file_list:
        print("No images found in the import path " + import_path)
        sys.exit(1)
    if gps_week_column is not None and not convert_gps_time:
        print(
            "Error, in order to parse timestamp provided as a combination of GPS week and GPS seconds, you must specify timestamp column and flag --convert_gps_time, exiting..."
        )
        sys.exit(1)
    if (convert_gps_time or convert_utc_time) and timestamp_column is None:
        print(
            "Error, if specifying a flag to convert timestamp, timestamp column must be provided, exiting..."
        )
        sys.exit(1)
    column_indexes = [
        filename_column, timestamp_column, latitude_column, longitude_column,
        heading_column, altitude_column, gps_week_column
    ]
    if any(column == 0 for column in column_indexes):
        print(
            "Error, csv column numbers start with 1, one of the columns specified is 0."
        )
        sys.exit(1)
    # BUGFIX: this must be a list, not a map object -- a lazy map iterator
    # would be exhausted after the first image, leaving all subsequent
    # images without column data on Python 3.
    column_indexes = [x - 1 if x else None for x in column_indexes]
    # checks for meta arguments if any
    meta_columns, meta_names, meta_types = validate_meta_data(
        meta_columns, meta_names, meta_types)
    # open and process csv
    csv_data = read_csv(csv_path, delimiter=delimiter, header=header)
    # align by filename column if provided, otherwise align in order of image
    # names
    file_names = None
    if filename_column:
        file_names = csv_data[filename_column - 1]
    else:
        if verbose:
            print(
                "Warning, filename column not provided, images will be aligned with the csv data in order of the image filenames."
            )
    # process each image
    for idx, image in tqdm(enumerate(process_file_list),
                           desc="Inserting csv data in image EXIF"):
        # get image entry index
        image_index = get_image_index(image, file_names) if file_names else idx
        if image_index is None:
            print("Warning, no entry found in csv file for image " + image)
            continue
        # get required data
        timestamp, lat, lon, heading, altitude = parse_csv_geotag_data(
            csv_data, image_index, column_indexes, convert_gps_time,
            convert_utc_time, time_format)
        # get meta data
        meta = parse_csv_meta_data(csv_data, image_index, meta_columns,
                                   meta_types, meta_names)
        # insert in image EXIF; each field is optional
        exif_edit = ExifEdit(image)
        if timestamp:
            exif_edit.add_date_time_original(timestamp)
        if lat and lon:
            exif_edit.add_lat_lon(lat, lon)
        if heading:
            exif_edit.add_direction(heading)
        if altitude:
            exif_edit.add_altitude(altitude)
        if meta:
            exif_edit.add_image_history(meta["MAPMetaTags"])
        # decide output path; a stale processed copy is always removed first
        filename = image
        filename_keep_original = processing.processed_images_rootpath(image)
        if os.path.isfile(filename_keep_original):
            os.remove(filename_keep_original)
        if keep_original:
            if not os.path.isdir(os.path.dirname(filename_keep_original)):
                os.makedirs(os.path.dirname(filename_keep_original))
            filename = filename_keep_original
        try:
            exif_edit.write(filename=filename)
        except Exception:
            # narrowed from a bare except; abort the whole run on write failure
            print("Error, image EXIF could not be written back for image " +
                  image)
            return None
def get_final_mapillary_image_description(log_root, image,
                                          master_upload=False,
                                          verbose=False,
                                          skip_EXIF_insert=False,
                                          keep_original=False,
                                          overwrite_all_EXIF_tags=False,
                                          overwrite_EXIF_time_tag=False,
                                          overwrite_EXIF_gps_tag=False,
                                          overwrite_EXIF_direction_tag=False,
                                          overwrite_EXIF_orientation_tag=False):
    """Assemble the final image description from per-sub-command log files.

    Merges the json produced by each processing sub-command under `log_root`
    into one description dict, stamps it with a fresh MAPPhotoUUID, and
    (unless `skip_EXIF_insert`) writes it -- plus optionally time / gps /
    direction / orientation tags -- into the image EXIF.

    :param log_root: directory holding "<sub_command>.json"/"_failed" markers
    :param image: path of the image being processed
    :param master_upload: upload-hash/upload-params logs may be absent
    :param keep_original: write EXIF to the processed-images root instead
    :param overwrite_all_EXIF_tags: shorthand for all overwrite_* flags
    :return: the description dict, or None on any failure
    """
    sub_commands = ["user_process", "geotag_process", "sequence_process",
                    "upload_params_process", "settings_upload_hash",
                    "import_meta_data_process"]
    final_mapillary_image_description = {}
    for sub_command in sub_commands:
        # a "<sub_command>_failed" marker aborts, except for optional meta data
        sub_command_status = os.path.join(log_root, sub_command + "_failed")
        if os.path.isfile(sub_command_status) and sub_command != "import_meta_data_process":
            print("Warning, required {} failed for image ".format(sub_command) + image)
            return None
        sub_command_data_path = os.path.join(log_root, sub_command + ".json")
        if not os.path.isfile(sub_command_data_path) and sub_command != "import_meta_data_process":
            # upload hash/params are not required for master uploads
            if (sub_command == "settings_upload_hash" or
                    sub_command == "upload_params_process") and master_upload:
                continue
            else:
                print("Warning, required {} did not result in a valid json file for image ".format(
                    sub_command) + image)
                return None
        # upload hash/params json is not merged into the description
        if sub_command == "settings_upload_hash" or sub_command == "upload_params_process":
            continue
        try:
            sub_command_data = load_json(sub_command_data_path)
            if not sub_command_data:
                if verbose:
                    # BUGFIX: was "+ json_file", an undefined name (NameError)
                    print("Warning, no data read from json file " +
                          sub_command_data_path)
                return None
            final_mapillary_image_description.update(sub_command_data)
        except Exception:
            if verbose:
                print("Warning, could not load json file " +
                      sub_command_data_path)
            # meta data is optional; anything else is fatal
            if sub_command == "import_meta_data_process":
                continue
            return None
    # a unique photo ID to check for duplicates in the backend in case the
    # image gets uploaded more than once
    final_mapillary_image_description['MAPPhotoUUID'] = str(
        uuid.uuid4())
    if skip_EXIF_insert:
        return final_mapillary_image_description
    # insert in the EXIF image description
    try:
        image_exif = ExifEdit(image)
    except Exception:
        print_error("Error, image EXIF could not be loaded for image " + image)
        return None
    try:
        image_exif.add_image_description(
            final_mapillary_image_description)
    except Exception:
        print_error(
            "Error, image EXIF tag Image Description could not be edited for image " + image)
        return None
    # also try to set time and gps so image can be placed on the map for testing and
    # qc purposes; overwrite_all_EXIF_tags implies every individual flag
    if overwrite_all_EXIF_tags or overwrite_EXIF_time_tag:
        try:
            image_exif.add_date_time_original(datetime.datetime.strptime(
                final_mapillary_image_description["MAPCaptureTime"],
                '%Y_%m_%d_%H_%M_%S_%f'))
        except Exception:
            pass
    if overwrite_all_EXIF_tags or overwrite_EXIF_gps_tag:
        try:
            image_exif.add_lat_lon(
                final_mapillary_image_description["MAPLatitude"],
                final_mapillary_image_description["MAPLongitude"])
        except Exception:
            pass
    if overwrite_all_EXIF_tags or overwrite_EXIF_direction_tag:
        try:
            image_exif.add_direction(
                final_mapillary_image_description["MAPCompassHeading"]["TrueHeading"])
        except Exception:
            pass
    if overwrite_all_EXIF_tags or overwrite_EXIF_orientation_tag:
        try:
            if "MAPOrientation" in final_mapillary_image_description:
                image_exif.add_orientation(
                    final_mapillary_image_description["MAPOrientation"])
        except Exception:
            pass
    # decide output path; a stale processed copy is always removed first
    filename = image
    filename_keep_original = processed_images_rootpath(image)
    if os.path.isfile(filename_keep_original):
        os.remove(filename_keep_original)
    if keep_original:
        filename = filename_keep_original
        if not os.path.isdir(os.path.dirname(filename_keep_original)):
            os.makedirs(os.path.dirname(filename_keep_original))
    try:
        image_exif.write(filename=filename)
    except Exception:
        print_error(
            "Error, image EXIF could not be written back for image " + image)
        return None
    return final_mapillary_image_description
def interpolation(data,
                  file_in_path=None,
                  file_format="csv",
                  time_column=0,
                  delimiter=",",
                  time_utc=False,
                  time_format="%Y-%m-%dT%H:%M:%SZ",
                  header=False,
                  keep_original=False,
                  import_path=None,
                  max_time_delta=1,
                  verbose=False):
    """Interpolate missing GPS or de-duplicate identical timestamps.

    Operates either on an external csv log file (`file_in_path`) or on the
    EXIF of images in a directory (`import_path`).  `data` selects the mode:
    "missing_gps" or "identical_timestamps".  NOTE(review): this function
    calls sys.exit() on both success and failure paths, so it never returns
    to the caller in the file/identical-timestamps branches.

    :param data: "missing_gps" or "identical_timestamps"
    :param file_in_path: path to an external log file (csv only)
    :param file_format: only "csv" is supported
    :param time_column: index of the timestamp column in the csv data
    :param delimiter: csv delimiter
    :param time_utc: whether timestamps are given as UTC epoch values
    :param time_format: strftime/strptime format of the timestamps
    :param header: whether the csv file has a header row
    :param keep_original: if True, write results to a "_processed" copy
    :param import_path: directory of images to process
    :param max_time_delta: max time gap allowed when interpolating lat/lon
    :param verbose: print non-fatal per-image warnings
    """
    if not data:
        # NOTE(review): missing space at the string join -- the two message
        # sentences run together when printed
        print_error("Error, you must specify the data for interpolation." +
                    'Choose between "missing_gps" or "identical_timestamps"')
        sys.exit(1)
    if not import_path and not file_in_path:
        print_error("Error, you must specify a path to data, either path to directory with images or path to an external log file.")
        sys.exit(1)
    # --- external csv log file branch ---
    if file_in_path:
        if not os.path.isfile(file_in_path):
            print_error("Error, specified input file does not exist, exiting...")
            sys.exit(1)
        if file_format != "csv":
            print_error("Only csv file format is supported at the moment, exiting...")
            sys.exit(1)
        csv_data = process_csv.read_csv(
            file_in_path, delimiter=delimiter, header=header)
        if data == "identical_timestamps":
            # parse, interpolate, and re-format the timestamp column in place
            timestamps = csv_data[time_column]
            timestamps_datetime = [process_csv.format_time(
                timestamp, time_utc, time_format) for timestamp in timestamps]
            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps_datetime)
            csv_data[time_column] = format_datetime(
                timestamps_interpolated, time_utc, time_format)
            # overwrite in place unless keep_original asks for a copy;
            # [:-4] assumes a 3-character extension plus dot -- TODO confirm
            file_out = file_in_path if not keep_original else file_in_path[
                :-4] + "_processed." + file_format
            # csv_data is column-oriented; zip(*csv_data) yields rows
            with open(file_out, "w") as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=delimiter)
                for row in zip(*csv_data):
                    csvwriter.writerow(row)
            sys.exit()
        elif data == "missing_gps":
            print_error(
                "Error, missing gps interpolation in an external log file not supported yet, exiting...")
            sys.exit(1)
        else:
            print_error("Error unsupported data for interpolation, exiting...")
            sys.exit(1)
    # --- image directory branch ---
    if import_path:
        if not os.path.isdir(import_path):
            print_error("Error, specified import path does not exist, exiting...")
            sys.exit(1)
        # get list of files to process
        process_file_list = uploader.get_total_file_list(import_path)
        if not len(process_file_list):
            print("No images found in the import path " + import_path)
            sys.exit(1)
        if data == "missing_gps":
            # get geotags from images and a list of tuples with images missing geotags
            # and their timestamp
            geotags, missing_geotags = processing.get_images_geotags(
                process_file_list)
            if not len(missing_geotags):
                print("No images in directory {} missing geotags, exiting...".format(
                    import_path))
                sys.exit(1)
            if not len(geotags):
                print("No images in directory {} with geotags.".format(import_path))
                sys.exit(1)
            sys.stdout.write("Interpolating gps for {} images missing geotags.".format(
                len(missing_geotags)))
            for image, timestamp in tqdm(missing_geotags, desc="Interpolating missing gps"):
                # interpolate position from neighbouring geotagged images
                try:
                    lat, lon, bearing, elevation = interpolate_lat_lon(
                        geotags, timestamp, max_time_delta)
                except Exception as e:
                    print_error("Error, {}, interpolation of latitude and longitude failed for image {}".format(
                        e, image))
                    continue
                # insert into exif
                exif_edit = ExifEdit(image)
                if lat and lon:
                    exif_edit.add_lat_lon(lat, lon)
                else:
                    print_error(
                        "Error, lat and lon not interpolated for image {}.".format(image))
                if bearing:
                    exif_edit.add_direction(bearing)
                else:
                    if verbose:
                        print(
                            "Warning, bearing not interpolated for image {}.".format(image))
                if elevation:
                    exif_edit.add_altitude(elevation)
                else:
                    if verbose:
                        print(
                            "Warning, altitude not interpolated for image {}.".format(image))
                # record that the gps was interpolated in the image history
                meta = {}
                add_meta_tag(meta, "booleans", "interpolated_gps", True)
                exif_edit.add_image_history(meta["MAPMetaTags"])
                # NOTE(review): the "_processed." suffix keeps the trailing
                # dot but drops the extension -- looks suspicious; confirm
                file_out = image if not keep_original else image[:-
                                                                 4] + "_processed."
                exif_edit.write(filename=file_out)
        elif data == "identical_timestamps":
            sys.stdout.write("Loading image timestamps.")
            # read timestamps from EXIF of every image
            timestamps = []
            for image in tqdm(process_file_list, desc="Interpolating identical timestamps"):
                # load exif
                exif = ExifRead(image)
                timestamp = exif.extract_capture_time()
                if timestamp:
                    timestamps.append(timestamp)
                else:
                    print("Capture could not be extracted for image {}.".format(image))
            # interpolate; spreads identical consecutive timestamps apart
            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps)
            print("")
            sys.stdout.write("Interpolating identical timestamps.")
            counter = 0
            # write back
            for image, timestamp in tqdm(zip(process_file_list, timestamps_interpolated),
                                         desc="Writing capture time in image EXIF"):
                # print progress, a dot per image and a newline every 100
                counter += 1
                sys.stdout.write('.')
                if (counter % 100) == 0:
                    print("")
                # load exif
                exif_edit = ExifEdit(image)
                exif_edit.add_date_time_original(timestamp)
                # write to exif
                file_out = image if not keep_original else image[
                    :-4] + "_processed."
                exif_edit.write(filename=file_out)
            sys.exit()
        else:
            print_error("Error unsupported data for interpolation, exiting...")
            sys.exit(1)
    print("")
def get_final_mapillary_image_description(log_root, image,
                                          master_upload=False,
                                          verbose=False,
                                          skip_EXIF_insert=False):
    """Assemble the final image description from per-sub-command log files.

    Merges the json produced by each processing sub-command under `log_root`
    into one description dict, stamps it with a fresh MAPPhotoUUID, and
    (unless `skip_EXIF_insert`) writes it -- plus best-effort time, gps and
    direction tags -- into the image EXIF.

    :param log_root: directory holding "<sub_command>.json"/"_failed" markers
    :param image: path of the image being processed
    :param master_upload: upload-hash/upload-params logs may be absent
    :param verbose: print non-fatal warnings
    :param skip_EXIF_insert: return the dict without touching the image
    :return: the description dict, or None on any failure
    """
    sub_commands = ["user_process", "geotag_process", "sequence_process",
                    "upload_params_process", "settings_upload_hash",
                    "import_meta_data_process"]
    final_mapillary_image_description = {}
    for sub_command in sub_commands:
        # a "<sub_command>_failed" marker aborts, except for optional meta data
        sub_command_status = os.path.join(log_root, sub_command + "_failed")
        if os.path.isfile(sub_command_status) and sub_command != "import_meta_data_process":
            if verbose:
                print("Warning, required {} failed for image ".format(sub_command) + image)
            return None
        sub_command_data_path = os.path.join(log_root, sub_command + ".json")
        if not os.path.isfile(sub_command_data_path) and sub_command != "import_meta_data_process":
            # upload hash/params are not required for master uploads
            if (sub_command == "settings_upload_hash" or
                    sub_command == "upload_params_process") and master_upload:
                continue
            else:
                if verbose:
                    print("Warning, required {} did not result in a valid json file for image ".format(
                        sub_command) + image)
                return None
        # upload hash/params json is not merged into the description
        if sub_command == "settings_upload_hash" or sub_command == "upload_params_process":
            continue
        try:
            sub_command_data = load_json(sub_command_data_path)
            if not sub_command_data:
                if verbose:
                    # BUGFIX: was "+ json_file", an undefined name (NameError)
                    print("Warning, no data read from json file " +
                          sub_command_data_path)
                return None
            final_mapillary_image_description.update(sub_command_data)
        except Exception:
            if verbose:
                print("Warning, could not load json file " +
                      sub_command_data_path)
            # meta data is optional; anything else is fatal
            if sub_command == "import_meta_data_process":
                continue
            return None
    # a unique photo ID to check for duplicates in the backend in case the
    # image gets uploaded more than once
    final_mapillary_image_description['MAPPhotoUUID'] = str(
        uuid.uuid4())
    if skip_EXIF_insert:
        return final_mapillary_image_description
    # insert in the EXIF image description
    try:
        image_exif = ExifEdit(image)
    except Exception:
        print("Error, image EXIF could not be loaded for image " + image)
        return None
    try:
        image_exif.add_image_description(
            final_mapillary_image_description)
    except Exception:
        print(
            "Error, image EXIF tag Image Description could not be edited for image " + image)
        return None
    # also try to set time and gps so image can be placed on the map for
    # testing and qc purposes; each tag is best-effort
    try:
        image_exif.add_date_time_original(datetime.datetime.strptime(
            final_mapillary_image_description["MAPCaptureTime"],
            '%Y_%m_%d_%H_%M_%S_%f'))
    except Exception:
        pass
    try:
        image_exif.add_lat_lon(
            final_mapillary_image_description["MAPLatitude"],
            final_mapillary_image_description["MAPLongitude"])
    except Exception:
        pass
    try:
        image_exif.add_direction(
            final_mapillary_image_description["MAPCompassHeading"]["TrueHeading"])
    except Exception:
        pass
    try:
        image_exif.write()
    except Exception:
        print("Error, image EXIF could not be written back for image " + image)
        return None
    return final_mapillary_image_description
def process_csv(import_path,
                csv_path,
                filename_column=None,
                timestamp_column=None,
                latitude_column=None,
                longitude_column=None,
                heading_column=None,
                altitude_column=None,
                gps_week_column=None,
                time_format="%Y:%m:%d %H:%M:%S.%f",
                convert_gps_time=False,
                convert_utc_time=False,
                delimiter=",",
                header=False,
                meta_columns=None,
                meta_names=None,
                meta_types=None,
                verbose=False,
                keep_original=False):
    """Insert geotag and meta data from an external csv file into image EXIF.

    Rows are matched to images either via `filename_column` or, if not
    provided, in order of the image file names.  Column numbers are
    1-indexed on the command line and converted to 0-indexed here.

    :param import_path: directory of images to process
    :param csv_path: path to the csv file with geotag/meta data
    :param time_format: strptime format of the timestamp column
    :param convert_gps_time: timestamps are GPS week + seconds
    :param convert_utc_time: timestamps are UTC epoch values
    :param keep_original: write results under the processed-images root
        instead of overwriting the originals
    Exits the process with status 1 on invalid arguments.
    """
    # sanity checks; format() keeps this from crashing when import_path is None
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory {} does not exist, exiting...".format(
            import_path))
        sys.exit(1)
    if not csv_path or not os.path.isfile(csv_path):
        print("Error, csv file not provided or does not exist. Please specify a valid path to a csv file.")
        sys.exit(1)
    # get list of files to process
    process_file_list = uploader.get_total_file_list(import_path)
    if not process_file_list:
        print("No images found in the import path " + import_path)
        sys.exit(1)
    if gps_week_column is not None and not convert_gps_time:
        print("Error, in order to parse timestamp provided as a combination of GPS week and GPS seconds, you must specify timestamp column and flag --convert_gps_time, exiting...")
        sys.exit(1)
    if (convert_gps_time or convert_utc_time) and timestamp_column is None:
        print("Error, if specifying a flag to convert timestamp, timestamp column must be provided, exiting...")
        sys.exit(1)
    column_indexes = [filename_column, timestamp_column, latitude_column,
                      longitude_column, heading_column, altitude_column,
                      gps_week_column]
    if any(column == 0 for column in column_indexes):
        print("Error, csv column numbers start with 1, one of the columns specified is 0.")
        sys.exit(1)
    # BUGFIX: this must be a list, not a map object -- a lazy map iterator
    # would be exhausted after the first image, leaving all subsequent
    # images without column data on Python 3.
    column_indexes = [x - 1 if x else None for x in column_indexes]
    # checks for meta arguments if any
    meta_columns, meta_names, meta_types = validate_meta_data(
        meta_columns, meta_names, meta_types)
    # open and process csv
    csv_data = read_csv(csv_path, delimiter=delimiter, header=header)
    # align by filename column if provided, otherwise align in order of image
    # names
    file_names = None
    if filename_column:
        file_names = csv_data[filename_column - 1]
    else:
        if verbose:
            print("Warning, filename column not provided, images will be aligned with the csv data in order of the image filenames.")
    # process each image
    for idx, image in tqdm(enumerate(process_file_list),
                           desc="Inserting csv data in image EXIF"):
        # get image entry index
        image_index = get_image_index(image, file_names) if file_names else idx
        if image_index is None:
            print("Warning, no entry found in csv file for image " + image)
            continue
        # get required data
        timestamp, lat, lon, heading, altitude = parse_csv_geotag_data(
            csv_data, image_index, column_indexes, convert_gps_time,
            convert_utc_time, time_format)
        # get meta data
        meta = parse_csv_meta_data(
            csv_data, image_index, meta_columns, meta_types, meta_names)
        # insert in image EXIF; each field is optional
        exif_edit = ExifEdit(image)
        if timestamp:
            exif_edit.add_date_time_original(timestamp)
        if lat and lon:
            exif_edit.add_lat_lon(lat, lon)
        if heading:
            exif_edit.add_direction(heading)
        if altitude:
            exif_edit.add_altitude(altitude)
        if meta:
            exif_edit.add_image_history(meta["MAPMetaTags"])
        # decide output path; a stale processed copy is always removed first
        filename = image
        filename_keep_original = processing.processed_images_rootpath(image)
        if os.path.isfile(filename_keep_original):
            os.remove(filename_keep_original)
        if keep_original:
            if not os.path.isdir(os.path.dirname(filename_keep_original)):
                os.makedirs(os.path.dirname(filename_keep_original))
            filename = filename_keep_original
        try:
            exif_edit.write(filename=filename)
        except Exception:
            # narrowed from a bare except; abort the whole run on write failure
            print("Error, image EXIF could not be written back for image " +
                  image)
            return None