def get_import_meta_properties_exif(image, verbose=False):
    import_meta_data_properties = {}
    try:
        exif = ExifRead(image)
    except:
        if verbose:
            print("Warning, EXIF could not be read for image " + image +
                  ", import properties not read.")
        return None
    try:
        import_meta_data_properties["MAPOrientation"] = exif.extract_orientation()
    except:
        if verbose:
            print("Warning, image orientation tag not in EXIF.")
    try:
        import_meta_data_properties["MAPDeviceMake"] = exif.extract_make()
    except:
        if verbose:
            print("Warning, camera make tag not in EXIF.")
    try:
        import_meta_data_properties["MAPDeviceModel"] = exif.extract_model()
    except:
        if verbose:
            print("Warning, camera model tag not in EXIF.")
    try:
        import_meta_data_properties["MAPMetaTags"] = eval(exif.extract_image_history())
    except:
        pass
    return import_meta_data_properties

def verify_exif(filename):
    '''
    Check that image file has the required EXIF fields.

    Incompatible files will be ignored server side.
    '''
    # required tags in IFD name convention
    required_exif = required_fields()
    exif = ExifRead(filename)
    required_exif_exist = exif.fields_exist(required_exif)
    return required_exif_exist

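# Hedged usage sketch (added for illustration, not from the original source):
# verify_exif can be used to filter out images the server would ignore.
# The directory path below is a hypothetical example.
import os

_image_dir = "path/to/images"  # hypothetical
_images = [os.path.join(_image_dir, f) for f in os.listdir(_image_dir)
           if f.lower().endswith(".jpg")]
_valid = [img for img in _images if verify_exif(img)]
print("{} of {} images contain the required EXIF fields".format(
    len(_valid), len(_images)))
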
def get_geotag_properties_from_exif(image, offset_angle=0.0, verbose=False):
    try:
        exif = ExifRead(image)
    except:
        print_error("Error, EXIF could not be read for image " + image +
                    ", geotagging process failed for this image since gps/time properties not read.")
        return None
    # required tags
    try:
        lon, lat = exif.extract_lon_lat()
    except:
        print_error("Error, " + image +
                    " image latitude or longitude tag not in EXIF. Geotagging process failed for this image, since this is required information.")
        return None
    if lat is not None and lon is not None:
        geotag_properties = {"MAPLatitude": lat}
        geotag_properties["MAPLongitude"] = lon
    else:
        print_error("Error, " + image +
                    " image latitude or longitude tag not in EXIF. Geotagging process failed for this image, since this is required information.")
        return None
    try:
        timestamp = exif.extract_capture_time()
    except:
        print_error("Error, " + image +
                    " image capture time tag not in EXIF. Geotagging process failed for this image, since this is required information.")
        return None
    try:
        geotag_properties["MAPCaptureTime"] = datetime.datetime.strftime(
            timestamp, "%Y_%m_%d_%H_%M_%S_%f")[:-3]
    except:
        print_error("Error, {} image capture time tag incorrect format. Geotagging process failed for this image, since this is required information.".format(image))
        return None

    # optional fields
    try:
        geotag_properties["MAPAltitude"] = exif.extract_altitude()
    except:
        if verbose:
            print("Warning, image altitude tag not in EXIF.")
    try:
        heading = exif.extract_direction()
        if heading is None:
            heading = 0.0
        heading = normalize_bearing(heading + offset_angle)  # bearing of the image
        geotag_properties["MAPCompassHeading"] = {"TrueHeading": heading,
                                                  "MagneticHeading": heading}
    except:
        if verbose:
            print("Warning, image direction tag not in EXIF.")
    return geotag_properties

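# Clarifying example (added, not part of the original module): strftime's "%f"
# emits six microsecond digits, so the "[:-3]" slice above trims the capture
# time to millisecond precision. Standard library only.
import datetime

_ts = datetime.datetime(2018, 5, 4, 13, 37, 42, 123456)
_formatted = datetime.datetime.strftime(_ts, "%Y_%m_%d_%H_%M_%S_%f")[:-3]
assert _formatted == "2018_05_04_13_37_42_123"
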
def get_images_geotags(process_file_list):
    geotags = []
    missing_geotags = []
    for image in sorted(process_file_list):
        exif = ExifRead(image)
        timestamp = exif.extract_capture_time()
        lon, lat = exif.extract_lon_lat()
        altitude = exif.extract_altitude()
        if timestamp and lon and lat:
            geotags.append((timestamp, lat, lon, altitude))
            continue
        if timestamp and (not lon or not lat):
            missing_geotags.append((image, timestamp))
        else:
            print("Error image {} does not have captured time.".format(image))
    return geotags, missing_geotags

def get_image_list(path_to_pics):
    """
    Create a list of image tuples sorted by capture timestamp.

    :param path_to_pics: directory with JPEG files
    :return: a list of image tuples with time, directory, lat, long, ...
    """
    print("Searching for jpeg images in ", path_to_pics, end=" ")
    file_list = []
    for root, sub_folders, files in os.walk(path_to_pics):
        file_list += [
            os.path.join(root, filename) for filename in files
            if filename.lower().endswith(".jpg")
        ]

    files = []
    # get DateTimeOriginal data from the images and sort the list by timestamp
    for filepath in file_list:
        metadata = EXIFRead(filepath)
        try:
            t = metadata.extract_capture_time()
            s = int(t.microsecond / 1000000)
            geo = metadata.extract_geo()
            lat = geo.get("latitude")
            lon = geo.get("longitude")
            ele = geo.get("altitude")
            direction = metadata.extract_direction()
            files.append(
                Picture_infos._replace(path=filepath,
                                       DateTimeOriginal=t,
                                       SubSecTimeOriginal=s,
                                       Latitude=lat,
                                       Longitude=lon,
                                       Ele=ele,
                                       ImgDirection=direction))
        except KeyError as e:
            # if any of the required tags are not set the image is not added to the list
            print("Skipping {0}: {1}".format(filepath, e))

    files.sort(key=lambda file: file.DateTimeOriginal)
    print("{:5} found".format(len(files)))
    return files

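# Assumption, added for clarity (not from the original source): the
# Picture_infos._replace(...) call above implies Picture_infos is a namedtuple
# instance used as a prototype with default values, roughly like this
# hypothetical definition:
from collections import namedtuple

_PictureInfos = namedtuple(
    "PictureInfos",
    ["path", "DateTimeOriginal", "SubSecTimeOriginal",
     "Latitude", "Longitude", "Ele", "ImgDirection"])
# prototype with every field defaulting to None
_picture_infos_prototype = _PictureInfos(*([None] * len(_PictureInfos._fields)))
# _replace returns a new tuple with only the given fields overridden
_example = _picture_infos_prototype._replace(path="a.jpg", Latitude=48.85, Longitude=2.35)
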
def verify_mapillary_tag(filepath):
    '''
    Check that image file has the required Mapillary tag
    '''
    filepath_keep_original = processing.processed_images_rootpath(filepath)
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original
    return ExifRead(filepath).mapillary_tag_exists()

def get_images_geotags(process_file_list):
    geotags = []
    missing_geotags = []
    for image in tqdm(sorted(process_file_list), desc="Reading gps data"):
        exif = ExifRead(image)
        timestamp = exif.extract_capture_time()
        lon, lat = exif.extract_lon_lat()
        altitude = exif.extract_altitude()
        if timestamp and lon and lat:
            geotags.append((timestamp, lat, lon, altitude))
            continue
        if timestamp and (not lon or not lat):
            missing_geotags.append((image, timestamp))
        else:
            print_error(
                "Error image {} does not have captured time.".format(image))
    return geotags, missing_geotags

def get_geotag_properties_from_exif(image, offset_angle=0.0, verbose=False):
    try:
        exif = ExifRead(image)
    except:
        print("Error, EXIF could not be read for image " + image +
              ", geotagging process failed for this image since gps/time properties not read.")
        return None
    # required tags
    try:
        lon, lat = exif.extract_lon_lat()
    except:
        print("Error, " + image +
              " image latitude or longitude tag not in EXIF. Geotagging process failed for this image, since this is required information.")
        return None
    if lat is not None and lon is not None:
        geotag_properties = {"MAPLatitude": lat}
        geotag_properties["MAPLongitude"] = lon
    else:
        print("Error, " + image +
              " image latitude or longitude tag not in EXIF. Geotagging process failed for this image, since this is required information.")
        return None
    try:
        timestamp = exif.extract_capture_time()
    except:
        print("Error, " + image +
              " image capture time tag not in EXIF. Geotagging process failed for this image, since this is required information.")
        return None

    geotag_properties["MAPCaptureTime"] = datetime.datetime.strftime(
        timestamp, "%Y_%m_%d_%H_%M_%S_%f")[:-3]

    # optional fields
    try:
        geotag_properties["MAPAltitude"] = exif.extract_altitude()
    except:
        if verbose:
            print("Warning, image altitude tag not in EXIF.")
    try:
        heading = exif.extract_direction()
        if heading is None:
            heading = 0.0
        heading = normalize_bearing(heading + offset_angle)  # bearing of the image
        geotag_properties["MAPCompassHeading"] = {
            "TrueHeading": heading,
            "MagneticHeading": heading
        }
    except:
        if verbose:
            print("Warning, image direction tag not in EXIF.")
    return geotag_properties

def get_import_meta_properties_exif(image, verbose=False):
    import_meta_data_properties = {}
    try:
        exif = ExifRead(image)
    except:
        if verbose:
            print("Warning, EXIF could not be read for image " + image +
                  ", import properties not read.")
        return None
    try:
        import_meta_data_properties["MAPOrientation"] = exif.extract_orientation()
    except:
        if verbose:
            print("Warning, image orientation tag not in EXIF.")
    try:
        import_meta_data_properties["MAPDeviceMake"] = exif.extract_make()
    except:
        if verbose:
            print("Warning, camera make tag not in EXIF.")
    try:
        import_meta_data_properties["MAPDeviceModel"] = exif.extract_model()
    except:
        if verbose:
            print("Warning, camera model tag not in EXIF.")
    try:
        import_meta_data_properties["MAPMetaTags"] = eval(
            exif.extract_image_history())
    except:
        pass
    return import_meta_data_properties

def upload_file(filepath, max_attempts, url, permission, signature,
                key=None, aws_key=None):
    '''
    Upload file at filepath.
    '''
    if max_attempts is None:
        max_attempts = MAX_ATTEMPTS

    filename = os.path.basename(filepath)

    s3_filename = filename
    try:
        s3_filename = ExifRead(filepath).exif_name()
    except:
        pass

    filepath_keep_original = processing.processed_images_rootpath(filepath)
    filepath_in = filepath
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original

    # add S3 'path' if given
    if key is None:
        s3_key = s3_filename
    else:
        s3_key = key + s3_filename

    parameters = {
        "key": s3_key,
        "AWSAccessKeyId": aws_key,
        "acl": "private",
        "policy": permission,
        "signature": signature,
        "Content-Type": "image/jpeg"
    }

    with open(filepath, "rb") as f:
        encoded_string = f.read()

    data, headers = encode_multipart(
        parameters, {'file': {
            'filename': filename,
            'content': encoded_string
        }})

    if not DRY_RUN:
        displayed_upload_error = False
        for attempt in range(max_attempts):
            # Initialize response before each attempt
            response = None
            try:
                request = urllib2.Request(url, data=data, headers=headers)
                response = urllib2.urlopen(request)
                if response.getcode() == 204:
                    create_upload_log(filepath_in, "upload_success")
                    if displayed_upload_error:
                        print("Successful upload of {} on attempt {}".format(
                            filename, attempt + 1))
                else:
                    create_upload_log(filepath_in, "upload_failed")
                break  # attempts
            except urllib2.HTTPError as e:
                print("HTTP error: {} on {}, will attempt upload again for {} more times"
                      .format(e, filename, max_attempts - attempt - 1))
                displayed_upload_error = True
                time.sleep(5)
            except urllib2.URLError as e:
                print("URL error: {} on {}, will attempt upload again for {} more times"
                      .format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except httplib.HTTPException as e:
                print("HTTP exception: {} on {}, will attempt upload again for {} more times"
                      .format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except OSError as e:
                print("OS error: {} on {}, will attempt upload again for {} more times"
                      .format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except socket.timeout as e:
                # Specific timeout handling for Python 2.7
                print("Timeout error: {} (retrying), will attempt upload again for {} more times"
                      .format(filename, max_attempts - attempt - 1))
            finally:
                if response is not None:
                    response.close()
    else:
        print('DRY_RUN, Skipping actual image upload. Use this for debug only.')

def upload_file(filepath, url, permission, signature, key=None, aws_key=None):
    '''
    Upload file at filepath.
    '''
    filename = os.path.basename(filepath)

    done_file = is_done_file(filename)

    s3_filename = filename
    try:
        if not done_file:
            s3_filename = ExifRead(filepath).exif_name()
    except:
        pass

    filepath_keep_original = processing.processed_images_rootpath(filepath)
    filepath_in = filepath
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original

    # add S3 'path' if given
    if key is None:
        s3_key = s3_filename
    else:
        s3_key = key + s3_filename

    parameters = {"key": s3_key,
                  "AWSAccessKeyId": aws_key,
                  "acl": "private",
                  "policy": permission,
                  "signature": signature,
                  "Content-Type": "image/jpeg"}

    with open(filepath, "rb") as f:
        encoded_string = f.read()

    data, headers = encode_multipart(
        parameters, {'file': {'filename': filename, 'content': encoded_string}})

    for attempt in range(MAX_ATTEMPTS):
        # Initialize response before each attempt
        response = None
        try:
            request = urllib2.Request(url, data=data, headers=headers)
            response = urllib2.urlopen(request)
            if not done_file:
                if response.getcode() == 204:
                    create_upload_log(filepath_in, "upload_success")
                else:
                    create_upload_log(filepath_in, "upload_failed")
            break  # attempts
        except urllib2.HTTPError as e:
            print("HTTP error: {0} on {1}".format(e, filename))
            time.sleep(5)
        except urllib2.URLError as e:
            print("URL error: {0} on {1}".format(e, filename))
            time.sleep(5)
        except httplib.HTTPException as e:
            print("HTTP exception: {0} on {1}".format(e, filename))
            time.sleep(5)
        except OSError as e:
            print("OS error: {0} on {1}".format(e, filename))
            time.sleep(5)
        except socket.timeout as e:
            # Specific timeout handling for Python 2.7
            print("Timeout error: {0} (retrying)".format(filename))
        finally:
            if response is not None:
                response.close()

def upload_file(filepath, max_attempts, session):
    '''
    Upload file at filepath.
    '''
    if max_attempts is None:
        max_attempts = MAX_ATTEMPTS

    try:
        exif_read = ExifRead(filepath)
    except:
        pass

    filename = os.path.basename(filepath)

    try:
        exif_name = exif_read.exif_name()
        _, file_extension = os.path.splitext(filename)
        s3_filename = exif_name + file_extension
    except:
        s3_filename = filename

    try:
        lat, lon, ca, captured_at = exif_read.exif_properties()
        new_session = copy.deepcopy(session)
        session_fields = new_session["fields"]
        session_fields["X-Amz-Meta-Latitude"] = lat
        session_fields["X-Amz-Meta-Longitude"] = lon
        session_fields["X-Amz-Meta-Compass-Angle"] = ca
        session_fields["X-Amz-Meta-Captured-At"] = captured_at
        session = new_session
    except:
        pass

    filepath_keep_original = processing.processed_images_rootpath(filepath)
    filepath_in = filepath
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original

    if not DRY_RUN:
        displayed_upload_error = False
        for attempt in range(max_attempts):
            try:
                response = upload_api.upload_file(session, filepath, s3_filename)
                if 200 <= response.status_code < 300:
                    create_upload_log(filepath_in, "upload_success")
                    if displayed_upload_error:
                        print("Successful upload of {} on attempt {}".format(
                            filename, attempt + 1))
                else:
                    create_upload_log(filepath_in, "upload_failed")
                    print(response.text)
                break  # attempts
            except requests.RequestException as e:
                print("HTTP error: {} on {}, will attempt upload again for {} more times"
                      .format(e, filename, max_attempts - attempt - 1))
                displayed_upload_error = True
                time.sleep(5)
            except OSError as e:
                print("OS error: {} on {}, will attempt upload again for {} more times"
                      .format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except socket.timeout as e:
                # Specific timeout handling for Python 2.7
                print("Timeout error: {} (retrying), will attempt upload again for {} more times"
                      .format(filename, max_attempts - attempt - 1))
    else:
        print('DRY_RUN, Skipping actual image upload. Use this for debug only.')

def exif_time(filename):
    '''
    Get image capture time from exif
    '''
    metadata = ExifRead(filename)
    return metadata.extract_capture_time()

def interpolation(data,
                  file_in_path=None,
                  file_format="csv",
                  time_column=0,
                  delimiter=",",
                  time_utc=False,
                  time_format="%Y-%m-%dT%H:%M:%SZ",
                  header=False,
                  keep_original=False,
                  import_path=None,
                  max_time_delta=1,
                  verbose=False):

    if not data:
        print_error("Error, you must specify the data for interpolation. " +
                    'Choose between "missing_gps" or "identical_timestamps"')
        sys.exit(1)

    if not import_path and not file_in_path:
        print_error("Error, you must specify a path to data, either path to directory with images or path to an external log file.")
        sys.exit(1)

    if file_in_path:
        if not os.path.isfile(file_in_path):
            print_error("Error, specified input file does not exist, exiting...")
            sys.exit(1)
        if file_format != "csv":
            print_error("Only csv file format is supported at the moment, exiting...")
            sys.exit(1)

        csv_data = process_csv.read_csv(
            file_in_path, delimiter=delimiter, header=header)

        if data == "identical_timestamps":
            timestamps = csv_data[time_column]
            timestamps_datetime = [process_csv.format_time(
                timestamp, time_utc, time_format) for timestamp in timestamps]
            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps_datetime)
            csv_data[time_column] = format_datetime(
                timestamps_interpolated, time_utc, time_format)

            file_out = file_in_path if not keep_original else file_in_path[
                :-4] + "_processed." + file_format
            with open(file_out, "w") as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=delimiter)
                for row in zip(*csv_data):
                    csvwriter.writerow(row)
            sys.exit()
        elif data == "missing_gps":
            print_error(
                "Error, missing gps interpolation in an external log file not supported yet, exiting...")
            sys.exit(1)
        else:
            print_error("Error unsupported data for interpolation, exiting...")
            sys.exit(1)

    if import_path:
        if not os.path.isdir(import_path):
            print_error("Error, specified import path does not exist, exiting...")
            sys.exit(1)

        # get list of files to process
        process_file_list = uploader.get_total_file_list(import_path)
        if not len(process_file_list):
            print("No images found in the import path " + import_path)
            sys.exit(1)

        if data == "missing_gps":
            # get geotags from images and a list of tuples with images missing geotags
            # and their timestamp
            geotags, missing_geotags = processing.get_images_geotags(
                process_file_list)
            if not len(missing_geotags):
                print("No images in directory {} missing geotags, exiting...".format(
                    import_path))
                sys.exit(1)
            if not len(geotags):
                print("No images in directory {} with geotags.".format(import_path))
                sys.exit(1)

            sys.stdout.write("Interpolating gps for {} images missing geotags.".format(
                len(missing_geotags)))

            for image, timestamp in tqdm(missing_geotags, desc="Interpolating missing gps"):
                # interpolate
                try:
                    lat, lon, bearing, elevation = interpolate_lat_lon(
                        geotags, timestamp, max_time_delta)
                except Exception as e:
                    print_error("Error, {}, interpolation of latitude and longitude failed for image {}".format(
                        e, image))
                    continue
                # insert into exif
                exif_edit = ExifEdit(image)
                if lat and lon:
                    exif_edit.add_lat_lon(lat, lon)
                else:
                    print_error(
                        "Error, lat and lon not interpolated for image {}.".format(image))
                if bearing:
                    exif_edit.add_direction(bearing)
                else:
                    if verbose:
                        print("Warning, bearing not interpolated for image {}.".format(image))
                if elevation:
                    exif_edit.add_altitude(elevation)
                else:
                    if verbose:
                        print("Warning, altitude not interpolated for image {}.".format(image))
                meta = {}
                add_meta_tag(meta, "booleans", "interpolated_gps", True)
                exif_edit.add_image_history(meta["MAPMetaTags"])

                file_out = image if not keep_original else image[:-4] + "_processed."
                exif_edit.write(filename=file_out)
        elif data == "identical_timestamps":
            sys.stdout.write("Loading image timestamps.")
            # read timestamps
            timestamps = []
            for image in tqdm(process_file_list, desc="Interpolating identical timestamps"):
                # load exif
                exif = ExifRead(image)
                timestamp = exif.extract_capture_time()
                if timestamp:
                    timestamps.append(timestamp)
                else:
                    print("Capture could not be extracted for image {}.".format(image))

            # interpolate
            timestamps_interpolated = processing.interpolate_timestamp(timestamps)

            print("")
            sys.stdout.write("Interpolating identical timestamps.")
            counter = 0

            # write back
            for image, timestamp in tqdm(zip(process_file_list, timestamps_interpolated),
                                         desc="Writing capture time in image EXIF"):
                # print progress
                counter += 1
                sys.stdout.write('.')
                if (counter % 100) == 0:
                    print("")
                # load exif
                exif_edit = ExifEdit(image)
                exif_edit.add_date_time_original(timestamp)
                # write to exif
                file_out = image if not keep_original else image[:-4] + "_processed."
                exif_edit.write(filename=file_out)
            sys.exit()
        else:
            print_error("Error unsupported data for interpolation, exiting...")
            sys.exit(1)
    print("")

def interpolation(data,
                  file_in_path=None,
                  file_format="csv",
                  time_column=0,
                  delimiter=",",
                  time_utc=False,
                  time_format="%Y-%m-%dT%H:%M:%SZ",
                  header=False,
                  keep_original=False,
                  import_path=None,
                  max_time_delta=1,
                  verbose=False):

    if not data:
        print("Error, you must specify the data for interpolation.")
        print('Choose between "missing_gps" or "identical_timestamps"')
        sys.exit(1)

    if not import_path and not file_in_path:
        print("Error, you must specify a path to data, either path to directory with images or path to an external log file.")
        sys.exit(1)

    if file_in_path:
        if not os.path.isfile(file_in_path):
            print("Error, specified input file does not exist, exiting...")
            sys.exit(1)
        if file_format != "csv":
            print("Only csv file format is supported at the moment, exiting...")
            sys.exit(1)

        csv_data = process_csv.read_csv(
            file_in_path, delimiter=delimiter, header=header)

        if data == "identical_timestamps":
            timestamps = csv_data[time_column]
            timestamps_datetime = [
                process_csv.format_time(timestamp, time_utc, time_format)
                for timestamp in timestamps
            ]
            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps_datetime)
            csv_data[time_column] = format_datetime(
                timestamps_interpolated, time_utc, time_format)

            file_out = file_in_path if not keep_original else file_in_path[
                :-4] + "_processed." + file_format
            with open(file_out, "w") as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=delimiter)
                for row in zip(*csv_data):
                    csvwriter.writerow(row)
            sys.exit()
        elif data == "missing_gps":
            print("Error, missing gps interpolation in an external log file not supported yet, exiting...")
            sys.exit(1)
        else:
            print("Error unsupported data for interpolation, exiting...")
            sys.exit(1)

    if import_path:
        if not os.path.isdir(import_path):
            print("Error, specified import path does not exist, exiting...")
            sys.exit(1)

        # get list of files to process
        process_file_list = uploader.get_total_file_list(import_path)
        if not len(process_file_list):
            print("No images found in the import path " + import_path)
            sys.exit(1)

        if data == "missing_gps":
            # get geotags from images and a list of tuples with images missing geotags
            # and their timestamp
            geotags, missing_geotags = processing.get_images_geotags(
                process_file_list)
            if not len(missing_geotags):
                print("No images in directory {} missing geotags, exiting...".format(
                    import_path))
                sys.exit(1)
            if not len(geotags):
                print("No images in directory {} with geotags.".format(import_path))
                sys.exit(1)

            sys.stdout.write("Interpolating gps for {} images missing geotags.".format(
                len(missing_geotags)))

            for image, timestamp in tqdm(missing_geotags, desc="Interpolating missing gps"):
                # interpolate
                try:
                    lat, lon, bearing, elevation = interpolate_lat_lon(
                        geotags, timestamp, max_time_delta)
                except Exception as e:
                    print("Error, {}, interpolation of latitude and longitude failed for image {}"
                          .format(e, image))
                    continue
                # insert into exif
                exif_edit = ExifEdit(image)
                if lat and lon:
                    exif_edit.add_lat_lon(lat, lon)
                else:
                    print("Error, lat and lon not interpolated for image {}.".format(image))
                if bearing:
                    exif_edit.add_direction(bearing)
                else:
                    if verbose:
                        print("Warning, bearing not interpolated for image {}.".format(image))
                if elevation:
                    exif_edit.add_altitude(elevation)
                else:
                    if verbose:
                        print("Warning, altitude not interpolated for image {}.".format(image))
                meta = {}
                add_meta_tag(meta, "booleans", "interpolated_gps", True)
                exif_edit.add_image_history(meta["MAPMetaTags"])

                file_out = image if not keep_original else image[:-4] + "_processed."
                exif_edit.write(filename=file_out)
        elif data == "identical_timestamps":
            sys.stdout.write("Loading image timestamps.")
            # read timestamps
            timestamps = []
            for image in tqdm(process_file_list, desc="Interpolating identical timestamps"):
                # load exif
                exif = ExifRead(image)
                timestamp = exif.extract_capture_time()
                if timestamp:
                    timestamps.append(timestamp)
                else:
                    print("Capture could not be extracted for image {}.".format(image))

            # interpolate
            timestamps_interpolated = processing.interpolate_timestamp(timestamps)

            print("")
            sys.stdout.write("Interpolating identical timestamps.")
            counter = 0

            # write back
            for image, timestamp in tqdm(zip(process_file_list, timestamps_interpolated),
                                         desc="Writing capture time in image EXIF"):
                # print progress
                counter += 1
                sys.stdout.write('.')
                if (counter % 100) == 0:
                    print("")
                # load exif
                exif_edit = ExifEdit(image)
                exif_edit.add_date_time_original(timestamp)
                # write to exif
                file_out = image if not keep_original else image[:-4] + "_processed."
                exif_edit.write(filename=file_out)
            sys.exit()
        else:
            print("Error unsupported data for interpolation, exiting...")
            sys.exit(1)
    print("")

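# Hedged usage sketch (added, not from the original source): one way the
# interpolation helper above might be invoked. The import path is a
# hypothetical example, and max_time_delta is simply forwarded to
# interpolate_lat_lon, so its units depend on that helper.
if __name__ == "__main__":
    interpolation(data="missing_gps",
                  import_path="path/to/images",
                  max_time_delta=1,
                  keep_original=True,
                  verbose=True)
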
def verify_mapillary_tag(filename):
    '''
    Check that image file has the required Mapillary tag
    '''
    return ExifRead(filename).mapillary_tag_exists()