def verify_mapillary_tag(filepath):
    '''
    Check that image file has the required Mapillary tag.

    If a processed copy of the image exists (keep_original workflow), the
    check is performed on that copy instead of the path passed in.
    '''
    # fix: the docstring was placed after the first statements, where it is a
    # no-op string literal rather than the function docstring; moved to the top.
    filepath_keep_original = processing.processed_images_rootpath(filepath)
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original
    return ExifRead(filepath).mapillary_tag_exists()
def upload_file(filepath, max_attempts, url, permission, signature,
                key=None, aws_key=None):
    '''
    Upload file at filepath to S3 via a presigned POST, retrying up to
    max_attempts times (MAX_ATTEMPTS when None is passed).

    Logs upload_success/upload_failed against the original path even when
    a processed copy is what actually gets uploaded. No return value.
    '''
    if max_attempts is None:  # fix: identity comparison with None (was `== None`)
        max_attempts = MAX_ATTEMPTS
    filename = os.path.basename(filepath)
    # Prefer the EXIF-derived name for the S3 object; fall back to the local
    # basename when EXIF cannot be read (deliberate best effort).
    s3_filename = filename
    try:
        s3_filename = ExifRead(filepath).exif_name()
    except Exception:  # fix: bare except narrowed (still best-effort)
        pass
    # If a processed copy exists, upload that, but keep logging against the
    # path the caller passed in.
    filepath_keep_original = processing.processed_images_rootpath(filepath)
    filepath_in = filepath
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original
    # add S3 'path' if given
    if key is None:
        s3_key = s3_filename
    else:
        s3_key = key + s3_filename
    parameters = {
        "key": s3_key,
        "AWSAccessKeyId": aws_key,
        "acl": "private",
        "policy": permission,
        "signature": signature,
        "Content-Type": "image/jpeg"
    }
    with open(filepath, "rb") as f:
        encoded_string = f.read()
    data, headers = encode_multipart(
        parameters, {'file': {
            'filename': filename,
            'content': encoded_string
        }})
    if not DRY_RUN:  # fix: was `DRY_RUN == False`
        displayed_upload_error = False
        for attempt in range(max_attempts):
            # Initialize response before each attempt
            response = None
            try:
                request = urllib2.Request(url, data=data, headers=headers)
                response = urllib2.urlopen(request)
                if response.getcode() == 204:
                    create_upload_log(filepath_in, "upload_success")
                    if displayed_upload_error:  # fix: was `== True`
                        print("Successful upload of {} on attempt {}".format(
                            filename, attempt + 1))
                else:
                    create_upload_log(filepath_in, "upload_failed")
                break  # attempts
            except urllib2.HTTPError as e:
                print(
                    "HTTP error: {} on {}, will attempt upload again for {} more times"
                    .format(e, filename, max_attempts - attempt - 1))
                displayed_upload_error = True
                time.sleep(5)
            except urllib2.URLError as e:
                print(
                    "URL error: {} on {}, will attempt upload again for {} more times"
                    .format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except httplib.HTTPException as e:
                print(
                    "HTTP exception: {} on {}, will attempt upload again for {} more times"
                    .format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except OSError as e:
                print(
                    "OS error: {} on {}, will attempt upload again for {} more times"
                    .format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except socket.timeout as e:
                # Specific timeout handling for Python 2.7
                print(
                    "Timeout error: {} (retrying), will attempt upload again for {} more times"
                    .format(filename, max_attempts - attempt - 1))
                # fix: back off before retrying, consistent with the other handlers
                time.sleep(5)
            finally:
                if response is not None:
                    response.close()
    else:
        print(
            'DRY_RUN, Skipping actual image upload. Use this for debug only.')
def upload_file(filepath, max_attempts, url, permission, signature,
                key=None, aws_key=None):
    '''
    Upload file at filepath to S3 via a presigned POST, retrying up to
    max_attempts times (MAX_ATTEMPTS when None is passed).

    Logs upload_success/upload_failed against the original path even when
    a processed copy is what actually gets uploaded. No return value.
    '''
    if max_attempts is None:  # fix: identity comparison with None (was `== None`)
        max_attempts = MAX_ATTEMPTS
    filename = os.path.basename(filepath)
    # Prefer the EXIF-derived name for the S3 object; fall back to the local
    # basename when EXIF cannot be read (deliberate best effort).
    s3_filename = filename
    try:
        s3_filename = ExifRead(filepath).exif_name()
    except Exception:  # fix: bare except narrowed (still best-effort)
        pass
    # If a processed copy exists, upload that, but keep logging against the
    # path the caller passed in.
    filepath_keep_original = processing.processed_images_rootpath(filepath)
    filepath_in = filepath
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original
    # add S3 'path' if given
    if key is None:
        s3_key = s3_filename
    else:
        s3_key = key + s3_filename
    parameters = {
        "key": s3_key,
        "AWSAccessKeyId": aws_key,
        "acl": "private",
        "policy": permission,
        "signature": signature,
        "Content-Type": "image/jpeg"
    }
    with open(filepath, "rb") as f:
        encoded_string = f.read()
    data, headers = encode_multipart(
        parameters, {'file': {'filename': filename,
                              'content': encoded_string}})
    if not DRY_RUN:  # fix: was `DRY_RUN == False`
        displayed_upload_error = False
        for attempt in range(max_attempts):
            # Initialize response before each attempt
            response = None
            try:
                request = urllib2.Request(url, data=data, headers=headers)
                response = urllib2.urlopen(request)
                if response.getcode() == 204:
                    create_upload_log(filepath_in, "upload_success")
                    if displayed_upload_error:  # fix: was `== True`
                        print("Successful upload of {} on attempt {}".format(
                            filename, attempt + 1))
                else:
                    create_upload_log(filepath_in, "upload_failed")
                break  # attempts
            except urllib2.HTTPError as e:
                print("HTTP error: {} on {}, will attempt upload again for {} more times".format(
                    e, filename, max_attempts - attempt - 1))
                displayed_upload_error = True
                time.sleep(5)
            except urllib2.URLError as e:
                print("URL error: {} on {}, will attempt upload again for {} more times".format(
                    e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except httplib.HTTPException as e:
                print("HTTP exception: {} on {}, will attempt upload again for {} more times".format(
                    e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except OSError as e:
                print("OS error: {} on {}, will attempt upload again for {} more times".format(
                    e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except socket.timeout as e:
                # Specific timeout handling for Python 2.7
                print("Timeout error: {} (retrying), will attempt upload again for {} more times".format(
                    filename, max_attempts - attempt - 1))
                # fix: back off before retrying, consistent with the other handlers
                time.sleep(5)
            finally:
                if response is not None:
                    response.close()
    else:
        print('DRY_RUN, Skipping actual image upload. Use this for debug only.')
def upload_file(filepath, url, permission, signature, key=None, aws_key=None):
    '''
    Upload file at filepath to S3 via a presigned POST, retrying up to
    MAX_ATTEMPTS times.

    DONE marker files (is_done_file) skip both EXIF-based renaming and
    upload logging. No return value.
    '''
    filename = os.path.basename(filepath)
    done_file = is_done_file(filename)
    # Use the EXIF-derived name for the S3 object unless this is a DONE
    # marker file; fall back to the local basename on any EXIF failure.
    s3_filename = filename
    try:
        if not done_file:
            s3_filename = ExifRead(filepath).exif_name()
    except Exception:  # fix: bare except narrowed (still best-effort)
        pass
    # If a processed copy exists, upload that, but keep logging against the
    # path the caller passed in.
    filepath_keep_original = processing.processed_images_rootpath(filepath)
    filepath_in = filepath
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original
    # add S3 'path' if given
    if key is None:
        s3_key = s3_filename
    else:
        s3_key = key + s3_filename
    parameters = {
        "key": s3_key,
        "AWSAccessKeyId": aws_key,
        "acl": "private",
        "policy": permission,
        "signature": signature,
        "Content-Type": "image/jpeg"
    }
    with open(filepath, "rb") as f:
        encoded_string = f.read()
    data, headers = encode_multipart(
        parameters, {'file': {'filename': filename,
                              'content': encoded_string}})
    for attempt in range(MAX_ATTEMPTS):
        # Initialize response before each attempt
        response = None
        try:
            request = urllib2.Request(url, data=data, headers=headers)
            response = urllib2.urlopen(request)
            if not done_file:
                if response.getcode() == 204:
                    create_upload_log(filepath_in, "upload_success")
                else:
                    create_upload_log(filepath_in, "upload_failed")
            break  # attempts
        except urllib2.HTTPError as e:
            print("HTTP error: {0} on {1}".format(e, filename))
            time.sleep(5)
        except urllib2.URLError as e:
            print("URL error: {0} on {1}".format(e, filename))
            time.sleep(5)
        except httplib.HTTPException as e:
            print("HTTP exception: {0} on {1}".format(e, filename))
            time.sleep(5)
        except OSError as e:
            print("OS error: {0} on {1}".format(e, filename))
            time.sleep(5)
        except socket.timeout as e:
            # Specific timeout handling for Python 2.7
            print("Timeout error: {0} (retrying)".format(filename))
            # fix: back off before retrying, consistent with the other handlers
            time.sleep(5)
        finally:
            if response is not None:
                response.close()
def process_csv(import_path,
                csv_path,
                filename_column=None,
                timestamp_column=None,
                latitude_column=None,
                longitude_column=None,
                heading_column=None,
                altitude_column=None,
                gps_week_column=None,
                time_format="%Y:%m:%d %H:%M:%S.%f",
                convert_gps_time=False,
                convert_utc_time=False,
                delimiter=",",
                header=False,
                meta_columns=None,
                meta_names=None,
                meta_types=None,
                verbose=False,
                keep_original=False):
    '''
    Insert geotag (and optional meta) data parsed from a csv file into the
    EXIF of the images found under import_path.

    Column arguments are 1-based csv column numbers. Exits the process on
    invalid arguments; returns None (early on the first EXIF write failure).
    '''
    # sanity checks
    if not import_path or not os.path.isdir(import_path):
        # fix: error message typo (was "doesnt not exist")
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)
    if not csv_path or not os.path.isfile(csv_path):
        print(
            "Error, csv file not provided or does not exist. Please specify a valid path to a csv file."
        )
        sys.exit(1)
    # get list of file to process
    process_file_list = uploader.get_total_file_list(import_path)
    if not len(process_file_list):
        print("No images found in the import path " + import_path)
        sys.exit(1)
    if gps_week_column is not None and convert_gps_time == False:
        print(
            "Error, in order to parse timestamp provided as a combination of GPS week and GPS seconds, you must specify timestamp column and flag --convert_gps_time, exiting..."
        )
        sys.exit(1)
    if (convert_gps_time != False or convert_utc_time != False) and timestamp_column is None:
        print(
            "Error, if specifying a flag to convert timestamp, timestamp column must be provided, exiting..."
        )
        sys.exit(1)
    column_indexes = [
        filename_column, timestamp_column, latitude_column, longitude_column,
        heading_column, altitude_column, gps_week_column
    ]
    if any(column == 0 for column in column_indexes):
        print(
            "Error, csv column numbers start with 1, one of the columns specified is 0."
        )
        sys.exit(1)
    # fix: list comprehension instead of map() so the result stays
    # subscriptable under Python 3 as well (map() returns a lazy iterator there)
    column_indexes = [x - 1 if x else None for x in column_indexes]
    # checks for meta arguments if any
    meta_columns, meta_names, meta_types = validate_meta_data(
        meta_columns, meta_names, meta_types)
    # open and process csv
    csv_data = read_csv(csv_path, delimiter=delimiter, header=header)
    # align by filename column if provided, otherwise align in order of image
    # names
    file_names = None
    if filename_column:
        file_names = csv_data[filename_column - 1]
    else:
        if verbose:
            print(
                "Warning, filename column not provided, images will be aligned with the csv data in order of the image filenames."
            )
    # process each image
    for idx, image in tqdm(enumerate(process_file_list),
                           desc="Inserting csv data in image EXIF"):
        # get image entry index
        image_index = get_image_index(image, file_names) if file_names else idx
        if image_index is None:
            print("Warning, no entry found in csv file for image " + image)
            continue
        # get required data
        timestamp, lat, lon, heading, altitude = parse_csv_geotag_data(
            csv_data, image_index, column_indexes, convert_gps_time,
            convert_utc_time, time_format)
        # get meta data
        meta = parse_csv_meta_data(csv_data, image_index, meta_columns,
                                   meta_types, meta_names)
        # insert in image EXIF
        exif_edit = ExifEdit(image)
        if timestamp:
            exif_edit.add_date_time_original(timestamp)
        if lat and lon:
            exif_edit.add_lat_lon(lat, lon)
        if heading:
            exif_edit.add_direction(heading)
        if altitude:
            exif_edit.add_altitude(altitude)
        if meta:
            exif_edit.add_image_history(meta["MAPMetaTags"])
        # Write into the image itself, or into a processed copy when
        # keep_original is requested (stale copies are removed first).
        filename = image
        filename_keep_original = processing.processed_images_rootpath(image)
        if os.path.isfile(filename_keep_original):
            os.remove(filename_keep_original)
        if keep_original:
            if not os.path.isdir(os.path.dirname(filename_keep_original)):
                os.makedirs(os.path.dirname(filename_keep_original))
            filename = filename_keep_original
        try:
            exif_edit.write(filename=filename)
        except Exception:  # fix: bare except narrowed
            print("Error, image EXIF could not be written back for image " +
                  image)
            return None
def upload_file(filepath, max_attempts, session):
    '''
    Upload file at filepath through the upload_api session, retrying up to
    max_attempts times (MAX_ATTEMPTS when None is passed).

    EXIF properties, when readable, are copied into the session fields as
    X-Amz-Meta-* entries on a deep copy so the caller's session dict is not
    mutated. No return value.
    '''
    if max_attempts is None:  # fix: identity comparison with None (was `== None`)
        max_attempts = MAX_ATTEMPTS
    # Best-effort EXIF read; on failure exif_read stays unbound and the
    # dependent try blocks below fall back via their own handlers.
    try:
        exif_read = ExifRead(filepath)
    except Exception:  # fix: bare except narrowed (still best-effort)
        pass
    filename = os.path.basename(filepath)
    try:
        # NOTE: a NameError from a failed ExifRead above is also caught here
        exif_name = exif_read.exif_name()
        _, file_extension = os.path.splitext(filename)
        s3_filename = exif_name + file_extension
    except Exception:
        s3_filename = filename
    try:
        lat, lon, ca, captured_at = exif_read.exif_properties()
        # Deep copy so the caller's session dict is never mutated.
        new_session = copy.deepcopy(session)
        session_fields = new_session["fields"]
        session_fields["X-Amz-Meta-Latitude"] = lat
        session_fields["X-Amz-Meta-Longitude"] = lon
        session_fields["X-Amz-Meta-Compass-Angle"] = ca
        session_fields["X-Amz-Meta-Captured-At"] = captured_at
        session = new_session
    except Exception:
        pass
    # If a processed copy exists, upload that, but keep logging against the
    # path the caller passed in.
    filepath_keep_original = processing.processed_images_rootpath(filepath)
    filepath_in = filepath
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original
    if not DRY_RUN:  # fix: was `DRY_RUN == False`
        displayed_upload_error = False
        for attempt in range(max_attempts):
            try:
                response = upload_api.upload_file(session, filepath,
                                                  s3_filename)
                if 200 <= response.status_code < 300:
                    create_upload_log(filepath_in, "upload_success")
                    if displayed_upload_error:  # fix: was `== True`
                        print("Successful upload of {} on attempt {}".format(
                            filename, attempt + 1))
                else:
                    create_upload_log(filepath_in, "upload_failed")
                    print(response.text)
                break  # attempts
            except requests.RequestException as e:
                print(
                    "HTTP error: {} on {}, will attempt upload again for {} more times"
                    .format(e, filename, max_attempts - attempt - 1))
                displayed_upload_error = True
                time.sleep(5)
            except OSError as e:
                print(
                    "OS error: {} on {}, will attempt upload again for {} more times"
                    .format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except socket.timeout as e:
                # Specific timeout handling for Python 2.7
                print(
                    "Timeout error: {} (retrying), will attempt upload again for {} more times"
                    .format(filename, max_attempts - attempt - 1))
                # fix: back off before retrying, consistent with the other handlers
                time.sleep(5)
    else:
        print(
            'DRY_RUN, Skipping actual image upload. Use this for debug only.')
def process_csv(import_path,
                csv_path,
                filename_column=None,
                timestamp_column=None,
                latitude_column=None,
                longitude_column=None,
                heading_column=None,
                altitude_column=None,
                gps_week_column=None,
                time_format="%Y:%m:%d %H:%M:%S.%f",
                convert_gps_time=False,
                convert_utc_time=False,
                delimiter=",",
                header=False,
                meta_columns=None,
                meta_names=None,
                meta_types=None,
                verbose=False,
                keep_original=False):
    '''
    Insert geotag (and optional meta) data parsed from a csv file into the
    EXIF of the images found under import_path.

    Column arguments are 1-based csv column numbers. Exits the process on
    invalid arguments; returns None (early on the first EXIF write failure).
    '''
    # sanity checks
    if not import_path or not os.path.isdir(import_path):
        # fix: error message typo (was "doesnt not exist")
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)
    if not csv_path or not os.path.isfile(csv_path):
        print("Error, csv file not provided or does not exist. Please specify a valid path to a csv file.")
        sys.exit(1)
    # get list of file to process
    process_file_list = uploader.get_total_file_list(import_path)
    if not len(process_file_list):
        print("No images found in the import path " + import_path)
        sys.exit(1)
    if gps_week_column is not None and convert_gps_time == False:
        print("Error, in order to parse timestamp provided as a combination of GPS week and GPS seconds, you must specify timestamp column and flag --convert_gps_time, exiting...")
        sys.exit(1)
    if (convert_gps_time != False or convert_utc_time != False) and timestamp_column is None:
        print("Error, if specifying a flag to convert timestamp, timestamp column must be provided, exiting...")
        sys.exit(1)
    column_indexes = [filename_column, timestamp_column, latitude_column,
                      longitude_column, heading_column, altitude_column,
                      gps_week_column]
    if any(column == 0 for column in column_indexes):
        print("Error, csv column numbers start with 1, one of the columns specified is 0.")
        sys.exit(1)
    # fix: list comprehension instead of map() so the result stays
    # subscriptable under Python 3 as well (map() returns a lazy iterator there)
    column_indexes = [x - 1 if x else None for x in column_indexes]
    # checks for meta arguments if any
    meta_columns, meta_names, meta_types = validate_meta_data(
        meta_columns, meta_names, meta_types)
    # open and process csv
    csv_data = read_csv(csv_path, delimiter=delimiter, header=header)
    # align by filename column if provided, otherwise align in order of image
    # names
    file_names = None
    if filename_column:
        file_names = csv_data[filename_column - 1]
    else:
        if verbose:
            print("Warning, filename column not provided, images will be aligned with the csv data in order of the image filenames.")
    # process each image
    for idx, image in tqdm(enumerate(process_file_list),
                           desc="Inserting csv data in image EXIF"):
        # get image entry index
        image_index = get_image_index(image, file_names) if file_names else idx
        if image_index is None:
            print("Warning, no entry found in csv file for image " + image)
            continue
        # get required data
        timestamp, lat, lon, heading, altitude = parse_csv_geotag_data(
            csv_data, image_index, column_indexes, convert_gps_time,
            convert_utc_time, time_format)
        # get meta data
        meta = parse_csv_meta_data(
            csv_data, image_index, meta_columns, meta_types, meta_names)
        # insert in image EXIF
        exif_edit = ExifEdit(image)
        if timestamp:
            exif_edit.add_date_time_original(timestamp)
        if lat and lon:
            exif_edit.add_lat_lon(lat, lon)
        if heading:
            exif_edit.add_direction(heading)
        if altitude:
            exif_edit.add_altitude(altitude)
        if meta:
            exif_edit.add_image_history(meta["MAPMetaTags"])
        # Write into the image itself, or into a processed copy when
        # keep_original is requested (stale copies are removed first).
        filename = image
        filename_keep_original = processing.processed_images_rootpath(image)
        if os.path.isfile(filename_keep_original):
            os.remove(filename_keep_original)
        if keep_original:
            if not os.path.isdir(os.path.dirname(filename_keep_original)):
                os.makedirs(os.path.dirname(filename_keep_original))
            filename = filename_keep_original
        try:
            exif_edit.write(filename=filename)
        except Exception:  # fix: bare except narrowed
            print("Error, image EXIF could not be written back for image " +
                  image)
            return None