def insert_video_frame_timestamp(video_filename, video_sampling_path, start_time, sample_interval=2.0, duration_ratio=1.0, verbose=False):

    # get list of files to process
    frame_list = uploader.get_total_file_list(video_sampling_path)

    if not len(frame_list):
        # WARNING LOG
        print("No video frames were sampled.")
        return

    video_frame_timestamps = timestamps_from_filename(video_filename,
                                                      frame_list,
                                                      start_time,
                                                      sample_interval,
                                                      duration_ratio)

    for image, timestamp in tqdm(zip(frame_list,
                                     video_frame_timestamps), desc="Inserting frame capture time"):
        try:
            exif_edit = ExifEdit(image)
            exif_edit.add_date_time_original(timestamp)
            exif_edit.write()
        except Exception:
            # ERROR LOG
            print("Could not insert timestamp into video frame " +
                  os.path.basename(image)[:-4])
            continue
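A hypothetical invocation (a sketch; the video and frame paths are made up, and start_time is assumed to be a datetime.datetime as consumed by timestamps_from_filename):

import datetime

# Stamp frames sampled every 2 seconds, anchoring the first frame at the
# assumed capture start time of the video.
insert_video_frame_timestamp(
    "GOPR0001.MP4",                           # source video filename (hypothetical)
    "/data/mapillary_sampled_video_frames",   # sampled frames directory (hypothetical)
    datetime.datetime(2019, 5, 1, 12, 0, 0),  # capture start time
    sample_interval=2.0,
    duration_ratio=1.0,
)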
Example #2
def save_local_mapping(import_path):
    local_mapping_filepath = os.path.join(os.path.dirname(
        import_path), import_path + "_mapillary_image_uuid_to_local_path_mapping.csv")

    total_files = uploader.get_total_file_list(import_path)

    local_mapping = []
    for file in tqdm(total_files, desc="Reading image uuids"):
        image_file_uuid = None
        # strip the import path prefix (str.lstrip strips characters, not a prefix)
        relative_path = file[len(os.path.abspath(import_path)):].lstrip(os.sep)
        log_rootpath = uploader.log_rootpath(file)
        image_description_json_path = os.path.join(
            log_rootpath, "mapillary_image_description.json")
        if os.path.isfile(image_description_json_path):
            image_description_json = processing.load_json(
                image_description_json_path)
            if "MAPPhotoUUID" in image_description_json:
                image_file_uuid = image_description_json["MAPPhotoUUID"]
            else:
                print(
                    "Error, photo uuid not in mapillary_image_description.json log file.")
        else:
            image_exif = exif_read.ExifRead(file)
            image_description = json.loads(
                image_exif.extract_image_description())
            if "MAPPhotoUUID" in image_description:
                image_file_uuid = str(image_description["MAPPhotoUUID"])
            else:
                print("Warning, image {} EXIF does not contain mapillary image description and mapillary_image_description.json log file does not exist. Try to process the image using mapillary_tools.".format(file))
        if image_file_uuid:
            local_mapping.append((relative_path, image_file_uuid))
    return local_mapping
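The function builds local_mapping_filepath but only returns the mapping, so the caller is expected to persist it. A minimal sketch, with a hypothetical import path:

import csv

mapping = save_local_mapping("/data/import")  # hypothetical import path
with open("/data/import_mapillary_image_uuid_to_local_path_mapping.csv", "w") as csvfile:
    csvwriter = csv.writer(csvfile, delimiter=",")
    for row in mapping:
        csvwriter.writerow(row)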
Example #3
def insert_video_frame_timestamp(import_path, start_time, sample_interval, duration_ratio=1.0, verbose=False):

    # get list of files to process
    frame_list = uploader.get_total_file_list(import_path)

    if not len(frame_list):
        if verbose:
            print("No video frames were sampled.")
        return

    video_frame_timestamps = timestamps_from_filename(frame_list,
                                                      start_time,
                                                      sample_interval,
                                                      duration_ratio)
    for image, timestamp in zip(frame_list,
                                video_frame_timestamps):
        try:
            exif_edit = ExifEdit(image)
            exif_edit.add_date_time_original(timestamp)
            exif_edit.write()
        except Exception:
            if verbose:
                print("Could not insert timestamp into video frame " +
                      os.path.basename(image)[:-4])
            continue
Example #4
def download(import_path,
             user_name,
             output_folder,
             number_threads=10,
             verbose=False):
    total_files = uploader.get_total_file_list(import_path)
    rows = []

    local_mapping = save_local_mapping(import_path)

    signal.signal(signal.SIGTERM, service_shutdown)
    signal.signal(signal.SIGINT, service_shutdown)

    try:
        user_properties = uploader.authenticate_user(user_name)
    except Exception:
        print("Error, user authentication failed for user " + user_name)
        print(
            "Make sure your user credentials are correct, user authentication is required for images to be downloaded from Mapillary."
        )
        return None
    if "user_upload_token" in user_properties:
        token = user_properties["user_upload_token"]
    else:
        print("Error, failed to obtain user token, please try again.")
        return None
    do_sleep = False
    while not check_files_downloaded(local_mapping, output_folder, do_sleep):
        do_sleep = True

        lock = threading.Lock()

        downloaded_images = {
            "failed": 0,
            "nbr": 0,
            "success": 0,
        }

        threads = []
        try:
            for i in range(number_threads):
                t = BlurDownloader(lock, downloaded_images, local_mapping,
                                   output_folder, token)
                threads.append(t)
                t.start()
            while True:
                any_alive = False
                for t in threads:
                    any_alive = (any_alive or t.is_alive())

                if not any_alive:
                    break

                time.sleep(0.5)
        except ServiceExit:
            for t in threads:
                t.shutdown_flag.set()
            for t in threads:
                t.join()
            break
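A hypothetical call (a sketch; the user name and paths are made up, and authentication must succeed before any downloads start):

# Download blurred originals for a processed import using 10 worker threads.
download(
    "/data/import",          # processed import directory (hypothetical)
    "my_mapillary_user",     # account the images were uploaded with (hypothetical)
    "/data/blurred_output",  # destination folder (hypothetical)
    number_threads=10,
)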
Example #5
def inform_processing_start(import_path, len_process_file_list, process, skip_subfolders=False):

    total_file_list = uploader.get_total_file_list(
        import_path, skip_subfolders)
    print("Running {} for {} images, skipping {} images.".format(process,
                                                                 len_process_file_list,
                                                                 len(total_file_list) - len_process_file_list))
Example #6
def upload(import_path,
           manual_done=False,
           verbose=False,
           skip_subfolders=False,
           video_file=None,
           number_threads=None,
           max_attempts=None):
    '''
    Upload local images to Mapillary
    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        manual_done: Prompt user to confirm upload finalization.

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.
    '''
    # sanity check if video file is passed
    if video_file and not (os.path.isdir(video_file)
                           or os.path.isfile(video_file)):
        print("Error, video path " + video_file +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_file:
        # set sampling path
        video_sampling_path = processing.sampled_video_frames_rootpath(
            video_file)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.dirname(video_file), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of files to process
    total_file_list = uploader.get_total_file_list(import_path,
                                                   skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(import_path,
                                                     skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
        sys.exit()

    if len(failed_file_list):
        upload_failed = raw_input(
            "Retry uploading previously failed image uploads? [y/n]: ")
        # if yes, add images to the upload list
        if upload_failed in ["y", "Y", "yes", "Yes"]:
            upload_file_list.extend(failed_file_list)

    # verify the images in the upload list, they need to have the image
    # description and certain MAP properties
    upload_file_list = [f for f in upload_file_list if verify_mapillary_tag(f)]

    if not len(upload_file_list):
        print("No images to upload.")
        print(
            'Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them'
        )
        sys.exit(1)

    # get upload params
    params = {}
    for image in total_file_list:
        log_root = uploader.log_rootpath(image)
        upload_params_path = os.path.join(log_root,
                                          "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            with open(upload_params_path, "rb") as jf:
                params[image] = json.load(
                    jf, object_hook=uploader.ascii_encode_dict)

    # inform how many images are to be uploaded and how many are being skipped
    # from upload
    print("Uploading {} images with valid mapillary tags (Skipping {})".format(
        len(upload_file_list),
        len(total_file_list) - len(upload_file_list)))

    # call the actual upload, passing the list of images, the root of the
    # import and the upload params
    uploader.upload_file_list(upload_file_list, params, number_threads,
                              max_attempts)

    # finalize manual uploads if necessary
    finalize_file_list = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    # for manual uploads, a DONE file needs to be uploaded to let the harvester
    # know the sequence is done uploading
    if len(finalize_file_list):
        finalize_all = 1
        if manual_done:
            finalize_all = uploader.prompt_to_finalize("uploads")
        if finalize_all:
            # get the s3 locations of the sequences
            finalize_params = uploader.process_upload_finalization(
                finalize_file_list, params)
            uploader.finalize_upload(finalize_params)
            # flag finalization for each file
            uploader.flag_finalization(finalize_file_list)
        else:
            print("Uploads will not be finalized.")
            print(
                "If you wish to finalize your uploads, run the upload tool again."
            )
            sys.exit()

    uploader.print_summary(upload_file_list)
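A direct call might look like the following sketch (in practice this is driven by the mapillary_tools CLI, and the path here is hypothetical):

# Upload a processed directory with 4 upload threads, retrying each
# failed image up to 10 times; prompt before finalizing manual uploads.
upload(
    "/data/import",
    manual_done=True,
    skip_subfolders=False,
    number_threads=4,
    max_attempts=10,
)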
Example #7
def upload(import_path, manual_done=False, verbose=False, skip_subfolders=False):
    '''
    Upload local images to Mapillary
    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        manual_done: Prompt user to confirm upload finalization.

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.
    '''

    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " doesnt not exist, exiting...")
        sys.exit()

    # get list of files to process
    total_file_list = uploader.get_total_file_list(
        import_path, skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(
        import_path, skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
        sys.exit()

    if len(failed_file_list):
        upload_failed = raw_input(
            "Retry uploading previously failed image uploads? [y/n]: ")
        # if yes, add images to the upload list
        if upload_failed in ["y", "Y", "yes", "Yes"]:
            upload_file_list.extend(failed_file_list)

    # verify the images in the upload list, they need to have the image
    # description and certain MAP properties
    upload_file_list = [f for f in upload_file_list if verify_mapillary_tag(f)]

    if not len(upload_file_list):
        print("No images to upload.")
        print('Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them')
        sys.exit()

    # get upload params
    params = {}
    for image in total_file_list:
        log_root = uploader.log_rootpath(import_path, image)
        upload_params_path = os.path.join(
            log_root, "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            with open(upload_params_path, "rb") as jf:
                params[image] = json.load(
                    jf, object_hook=uploader.ascii_encode_dict)

    # inform how many images are to be uploaded and how many are being skipped
    # from upload
    print("Uploading {} images with valid mapillary tags (Skipping {})".format(
        len(upload_file_list), len(total_file_list) - len(upload_file_list)))

    # call the actual upload, passing the list of images, the root of the
    # import and the upload params
    uploader.upload_file_list(upload_file_list, import_path, params)

    # finalize manual uploads if necessary
    finalize_file_list = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    # for manual uploads, a DONE file needs to be uploaded to let the harvester
    # know the sequence is done uploading
    if len(finalize_file_list):
        finalize_all = 1
        if manual_done:
            finalize_all = uploader.prompt_to_finalize("uploads")
        if finalize_all:
            # get the s3 locations of the sequences
            finalize_params = uploader.process_upload_finalization(
                finalize_file_list, params)
            uploader.finalize_upload(finalize_params)
            # flag finalization for each file
            uploader.flag_finalization(import_path, finalize_file_list)
        else:
            print("Uploads will not be finalized.")
            print("If you wish to finalize your uploads, run the upload tool again.")
            sys.exit()

    uploader.print_summary(upload_file_list)
Example #8
def process_csv(import_path,
                csv_path,
                filename_column=None,
                timestamp_column=None,
                latitude_column=None,
                longitude_column=None,
                heading_column=None,
                altitude_column=None,
                gps_week_column=None,
                time_format="%Y:%m:%d %H:%M:%S.%f",
                convert_gps_time=False,
                convert_utc_time=False,
                delimiter=",",
                header=False,
                meta_columns=None,
                meta_names=None,
                meta_types=None,
                verbose=False,
                keep_original=False):

    # sanity checks
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " doesnt not exist, exiting...")
        sys.exit(1)

    if not csv_path or not os.path.isfile(csv_path):
        print(
            "Error, csv file not provided or does not exist. Please specify a valid path to a csv file."
        )
        sys.exit(1)

    # get list of files to process
    process_file_list = uploader.get_total_file_list(import_path)
    if not len(process_file_list):
        print("No images found in the import path " + import_path)
        sys.exit(1)

    if gps_week_column is not None and not convert_gps_time:
        print(
            "Error, in order to parse timestamp provided as a combination of GPS week and GPS seconds, you must specify timestamp column and flag --convert_gps_time, exiting..."
        )
        sys.exit(1)

    if (convert_gps_time
            or convert_utc_time) and timestamp_column is None:
        print(
            "Error, if specifying a flag to convert timestamp, timestamp column must be provided, exiting..."
        )
        sys.exit(1)

    column_indexes = [
        filename_column, timestamp_column, latitude_column, longitude_column,
        heading_column, altitude_column, gps_week_column
    ]

    if any([column == 0 for column in column_indexes]):
        print(
            "Error, csv column numbers start with 1, one of the columns specified is 0."
        )
        sys.exit(1)

    column_indexes = map(lambda x: x - 1 if x else None, column_indexes)

    # checks for meta arguments if any
    meta_columns, meta_names, meta_types = validate_meta_data(
        meta_columns, meta_names, meta_types)

    # open and process csv
    csv_data = read_csv(csv_path, delimiter=delimiter, header=header)

    # align by filename column if provided, otherwise align in order of image
    # names
    file_names = None
    if filename_column:
        file_names = csv_data[filename_column - 1]
    else:
        if verbose:
            print(
                "Warning, filename column not provided, images will be aligned with the csv data in order of the image filenames."
            )

    # process each image
    for idx, image in tqdm(enumerate(process_file_list),
                           desc="Inserting csv data in image EXIF"):

        # get image entry index
        image_index = get_image_index(image, file_names) if file_names else idx
        if image_index is None:
            print("Warning, no entry found in csv file for image " + image)
            continue

        # get required data
        timestamp, lat, lon, heading, altitude = parse_csv_geotag_data(
            csv_data, image_index, column_indexes, convert_gps_time,
            convert_utc_time, time_format)

        # get meta data
        meta = parse_csv_meta_data(csv_data, image_index, meta_columns,
                                   meta_types, meta_names)

        # insert in image EXIF
        exif_edit = ExifEdit(image)
        if timestamp:
            exif_edit.add_date_time_original(timestamp)
        if lat and lon:
            exif_edit.add_lat_lon(lat, lon)
        if heading:
            exif_edit.add_direction(heading)
        if altitude:
            exif_edit.add_altitude(altitude)
        if meta:
            exif_edit.add_image_history(meta["MAPMetaTags"])

        filename = image
        filename_keep_original = processing.processed_images_rootpath(image)

        if os.path.isfile(filename_keep_original):
            os.remove(filename_keep_original)

        if keep_original:
            if not os.path.isdir(os.path.dirname(filename_keep_original)):
                os.makedirs(os.path.dirname(filename_keep_original))
            filename = filename_keep_original

        try:
            exif_edit.write(filename=filename)
        except Exception:
            print("Error, image EXIF could not be written back for image " +
                  image)
            return None
Example #9
def post_process(import_path,
                 split_import_path=None,
                 video_import_path=None,
                 summarize=False,
                 move_images=False,
                 move_duplicates=False,
                 move_uploaded=False,
                 save_as_json=False,
                 list_file_status=False,
                 push_images=False,
                 skip_subfolders=False,
                 verbose=False,
                 save_local_mapping=False):

    # return if nothing specified
    if not any([summarize, move_images, list_file_status, push_images,
                move_duplicates, move_uploaded, save_local_mapping]):
        print("No post processing action specified.")
        return

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_import_path), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)
    if save_local_mapping:
        # NOTE: the boolean flag shadows the module-level save_local_mapping()
        # helper, so the helper must be referenced under another name here.
        local_mapping = save_local_mapping(import_path)
        local_mapping_filepath = os.path.join(os.path.dirname(
            import_path), import_path + "_mapillary_image_uuid_to_local_path_mapping.csv")
        with open(local_mapping_filepath, "w") as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            for row in local_mapping:
                csvwriter.writerow(row)
    else:
        print("Reading import logs for import path {}...".format(import_path))

        # collect logs
        summary_dict = {}
        status_list_dict = {}

        total_files = uploader.get_total_file_list(import_path)
        total_files_count = len(total_files)

        # upload logs
        uploaded_files = uploader.get_success_upload_file_list(
            import_path, skip_subfolders)
        uploaded_files_count = len(uploaded_files)

        failed_upload_files = uploader.get_failed_upload_file_list(
            import_path, skip_subfolders)
        failed_upload_files_count = len(failed_upload_files)

        to_be_finalized_files = uploader.get_finalize_file_list(import_path)
        to_be_finalized_files_count = len(to_be_finalized_files)

        summary_dict["total images"] = total_files_count
        summary_dict["upload summary"] = {
            "successfully uploaded": uploaded_files_count,
            "failed uploads": failed_upload_files_count,
            "uploaded to be finalized": to_be_finalized_files_count
        }

        status_list_dict["successfully uploaded"] = uploaded_files
        status_list_dict["failed uploads"] = failed_upload_files
        status_list_dict["uploaded to be finalized"] = to_be_finalized_files

        # process logs
        summary_dict["process summary"] = {}
        process_steps = ["user_process", "import_meta_process", "geotag_process",
                         "sequence_process", "upload_params_process", "mapillary_image_description"]
        process_status = ["success", "failed"]
        for step in process_steps:

            process_success = len(processing.get_process_status_file_list(
                import_path, step, "success", skip_subfolders))
            process_failed = len(processing.get_process_status_file_list(
                import_path, step, "failed", skip_subfolders))

            summary_dict["process summary"][step] = {
                "failed": process_failed,
                "success": process_success
            }

        duplicates_file_list = processing.get_duplicate_file_list(
            import_path, skip_subfolders)
        duplicates_file_list_count = len(duplicates_file_list)

        summary_dict["process summary"]["duplicates"] = duplicates_file_list_count
        status_list_dict["duplicates"] = duplicates_file_list

        # processed for upload
        to_be_uploaded_files = uploader.get_upload_file_list(
            import_path, skip_subfolders)
        to_be_uploaded_files_count = len(to_be_uploaded_files)
        summary_dict["process summary"]["processed_not_yet_uploaded"] = to_be_uploaded_files_count
        status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files

        # summary
        if summarize:
            print("")
            print("Import summary for import path {} :".format(import_path))
            print(json.dumps(summary_dict, indent=4))

            ipc.send('summary', summary_dict)

            if save_as_json:

                try:
                    processing.save_json(summary_dict, os.path.join(
                        import_path, "mapillary_import_summary.json"))
                except Exception as e:
                    print("Could not save summary into json at {}, due to {}".format(
                        os.path.join(import_path, "mapillary_import_summary.json"), e))

        # list file status
        if list_file_status:
            print("")
            print("List of file status for import path {} :".format(import_path))
            print(json.dumps(status_list_dict, indent=4))

            if save_as_json:

                try:
                    processing.save_json(status_list_dict, os.path.join(
                        import_path, "mapillary_import_image_status_list.json"))
                except Exception as e:
                    print("Could not save image status list into json at {}, due to {}".format(
                        os.path.join(import_path, "mapillary_import_image_status_list.json"), e))

        # push images that were uploaded successfully
        # collect upload params
        if push_images:
            to_be_pushed_files = uploader.get_success_only_manual_upload_file_list(
                import_path, skip_subfolders)
            params = {}
            for image in tqdm(to_be_pushed_files, desc="Pushing images"):
                log_root = uploader.log_rootpath(image)
                upload_params_path = os.path.join(
                    log_root, "upload_params_process.json")
                if os.path.isfile(upload_params_path):
                    with open(upload_params_path, "rb") as jf:
                        params[image] = json.load(
                            jf, object_hook=uploader.ascii_encode_dict)

            # get the s3 locations of the sequences
            finalize_params = uploader.process_upload_finalization(
                to_be_pushed_files, params)
            uploader.finalize_upload(finalize_params)
            # flag finalization for each file
            uploader.flag_finalization(to_be_pushed_files)

        if move_images or move_duplicates or move_uploaded:
            print("")
            print("Note that images will be moved along with their mapillary logs in order to preserve the import status")
            default_split_import_path = os.path.join(
                import_path, "mapillary_import_split_images")
            if not split_import_path:
                final_split_path = default_split_import_path
                print("")
                print(
                    "Split import path not provided and will therefore be set to default path {}".format(default_split_import_path))
            elif not os.path.isdir(split_import_path):
                final_split_path = default_split_import_path
                print("Split import path does not exist, split import path will be set to default path {}".format(
                    default_split_import_path))
            else:
                final_split_path = split_import_path
            print("")
            print("Splitting import path {} into {} based on image import status...".format(
                import_path, final_split_path))
            if move_images:
                move_duplicates = True
                move_uploaded = True
                # move failed uploads
                if not len(failed_upload_files):
                    print("")
                    print(
                        "There are no failed upload images in the specified import path.")
                else:
                    failed_upload_path = os.path.join(
                        final_split_path, "upload_failed")

                    if not os.path.isdir(failed_upload_path):
                        os.makedirs(failed_upload_path)

                    for failed in failed_upload_files:
                        failed_upload_image_path = os.path.join(
                            failed_upload_path, os.path.basename(failed))
                        os.rename(failed, failed_upload_image_path)
                        failed_upload_log_path = os.path.dirname(uploader.log_rootpath(
                            failed_upload_image_path))
                        if not os.path.isdir(failed_upload_log_path):
                            os.makedirs(failed_upload_log_path)
                        shutil.move(uploader.log_rootpath(failed),
                                    failed_upload_log_path)
                    print("")
                    print("Done moving failed upload images to {}".format(
                        failed_upload_path))
            if move_duplicates:
                if not len(duplicates_file_list):
                    print("")
                    print("There were no duplicates flagged in the specified import path. If you are processing the images with mapillary_tools and would like to flag duplicates, you must specify --advanced --flag_duplicates")
                else:
                    duplicate_path = os.path.join(
                        final_split_path, "duplicates")
                    if not os.path.isdir(duplicate_path):
                        os.makedirs(duplicate_path)
                    for duplicate in duplicates_file_list:
                        duplicate_image_path = os.path.join(
                            duplicate_path, os.path.basename(duplicate))
                        os.rename(duplicate, duplicate_image_path)
                        duplicate_log_path = os.path.dirname(uploader.log_rootpath(
                            duplicate_image_path))
                        if not os.path.isdir(duplicate_log_path):
                            os.makedirs(duplicate_log_path)
                        shutil.move(uploader.log_rootpath(duplicate),
                                    duplicate_log_path)
                    print("")
                    print("Done moving duplicate images to {}".format(
                        duplicate_path))
            if move_uploaded:
                if not len(uploaded_files):
                    print("")
                    print(
                        "There are no successfuly uploaded images in the specified import path.")
                else:
                    upload_success_path = os.path.join(
                        final_split_path, "upload_success")

                    if not os.path.isdir(upload_success_path):
                        os.makedirs(upload_success_path)

                    for uploaded in uploaded_files:
                        uploaded_image_path = os.path.join(
                            upload_success_path, os.path.basename(uploaded))
                        os.rename(uploaded, uploaded_image_path)
                        uploaded_log_path = os.path.dirname(uploader.log_rootpath(
                            uploaded_image_path))
                        if not os.path.isdir(uploaded_log_path):
                            os.makedirs(uploaded_log_path)
                        shutil.move(uploader.log_rootpath(uploaded),
                                    uploaded_log_path)
                    print("")
                    print("Done moving successfully uploaded images to {}".format(
                        upload_success_path))
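For instance, printing and saving an import summary could look like this sketch (the path is hypothetical):

post_process(
    "/data/import",
    summarize=True,
    save_as_json=True,
)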
Example #10
def upload(import_path, verbose=False, skip_subfolders=False, number_threads=None, max_attempts=None, video_import_path=None, dry_run=False, api_version=1.0):
    '''
    Upload local images to Mapillary
    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.
    '''
    # sanity check if video file is passed
    if video_import_path and (not os.path.isdir(video_import_path) and not os.path.isfile(video_import_path)):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    total_file_list = uploader.get_total_file_list(
        import_path, skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(
        import_path, skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)
    to_finalize_file_list = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
    else:
        if len(failed_file_list):
            upload_failed = raw_input(
                "Retry uploading previously failed image uploads? [y/n]: ") if not ipc.is_enabled() else 'y'
            # if yes, add images to the upload list
            if upload_failed in ["y", "Y", "yes", "Yes"]:
                upload_file_list.extend(failed_file_list)

        # verify the images in the upload list, they need to have the image
        # description and certain MAP properties
        upload_file_list = [
            f for f in upload_file_list if verify_mapillary_tag(f)]

        if not len(upload_file_list) and not len(to_finalize_file_list):
            print("No images to upload.")
            print('Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them')
            sys.exit(1)

        if len(upload_file_list):
            # get upload params for the manual upload images, group them per sequence
            # and separate direct upload images
            params = {}
            list_per_sequence_mapping = {}
            direct_upload_file_list = []
            for image in upload_file_list:
                log_root = uploader.log_rootpath(image)
                upload_params_path = os.path.join(
                    log_root, "upload_params_process.json")
                if os.path.isfile(upload_params_path):
                    with open(upload_params_path, "rb") as jf:
                        params[image] = json.load(
                            jf, object_hook=uploader.ascii_encode_dict)
                        sequence = params[image]["key"]
                        if sequence in list_per_sequence_mapping:
                            list_per_sequence_mapping[sequence].append(image)
                        else:
                            list_per_sequence_mapping[sequence] = [image]
                else:
                    direct_upload_file_list.append(image)

            # inform how many images are to be uploaded and how many are being skipped
            # from upload

            print("Uploading {} images with valid mapillary tags (Skipping {})".format(
                len(upload_file_list), len(total_file_list) - len(upload_file_list)))
            if api_version == 2.0:
                # NOTE: the v2 upload path is not implemented in this example;
                # only the v1 direct and manual upload calls below are used.
                pass
            if len(direct_upload_file_list):
                uploader.upload_file_list_direct(
                    direct_upload_file_list, number_threads, max_attempts)
            for idx, sequence in enumerate(list_per_sequence_mapping):
                uploader.upload_file_list_manual(
                    list_per_sequence_mapping[sequence], params, idx, number_threads, max_attempts)
        if len(to_finalize_file_list):
            params = {}
            sequences = []
            for image in to_finalize_file_list:
                log_root = uploader.log_rootpath(image)
                upload_params_path = os.path.join(
                    log_root, "upload_params_process.json")
                if os.path.isfile(upload_params_path):
                    with open(upload_params_path, "rb") as jf:
                        image_params = json.load(
                            jf, object_hook=uploader.ascii_encode_dict)
                        sequence = image_params["key"]
                        if sequence not in sequences:
                            params[image] = image_params
                            sequences.append(sequence)
            for image in params:
                uploader.upload_done_file(**params[image])
            uploader.flag_finalization(to_finalize_file_list)

    uploader.print_summary(upload_file_list)
Example #11
def interpolation(data,
                  file_in_path=None,
                  file_format="csv",
                  time_column=0,
                  delimiter=",",
                  time_utc=False,
                  time_format="%Y-%m-%dT%H:%M:%SZ",
                  header=False,
                  keep_original=False,
                  import_path=None,
                  max_time_delta=1,
                  verbose=False):

    if not data:
        print_error("Error, you must specify the data for interpolation." +
            'Choose between "missing_gps" or "identical_timestamps"')
        sys.exit(1)

    if not import_path and not file_in_path:
        print_error("Error, you must specify a path to data, either path to directory with images or path to an external log file.")
        sys.exit(1)

    if file_in_path:
        if not os.path.isfile(file_in_path):
            print_error("Error, specified input file does not exist, exiting...")
            sys.exit(1)
        if file_format != "csv":
            print_error("Only csv file format is supported at the moment, exiting...")
            sys.exit(1)

        csv_data = process_csv.read_csv(
            file_in_path, delimiter=delimiter, header=header)

        if data == "identical_timestamps":
            timestamps = csv_data[time_column]
            timestamps_datetime = [process_csv.format_time(
                timestamp, time_utc, time_format) for timestamp in timestamps]

            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps_datetime)

            csv_data[time_column] = format_datetime(
                timestamps_interpolated, time_utc, time_format)

            file_out = file_in_path if not keep_original else \
                file_in_path[:-4] + "_processed." + file_format

            with open(file_out, "w") as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=delimiter)
                for row in zip(*csv_data):
                    csvwriter.writerow(row)
            sys.exit()
        elif data == "missing_gps":
            print_error(
                "Error, missing gps interpolation in an external log file not supported yet, exiting...")
            sys.exit(1)
        else:
            print_error("Error unsupported data for interpolation, exiting...")
            sys.exit(1)

    if import_path:
        if not os.path.isdir(import_path):
            print_error("Error, specified import path does not exist, exiting...")
            sys.exit(1)

        # get list of files to process
        process_file_list = uploader.get_total_file_list(import_path)
        if not len(process_file_list):
            print("No images found in the import path " + import_path)
            sys.exit(1)

        if data == "missing_gps":
            # get geotags from images and a list of tuples with images missing geotags
            # and their timestamp
            geotags, missing_geotags = processing.get_images_geotags(
                process_file_list)
            if not len(missing_geotags):
                print("No images in directory {} missing geotags, exiting...".format(
                    import_path))
                sys.exit(1)
            if not len(geotags):
                print("No images in directory {} with geotags.".format(import_path))
                sys.exit(1)

            sys.stdout.write("Interpolating gps for {} images missing geotags.".format(
                len(missing_geotags)))

            for image, timestamp in tqdm(missing_geotags, desc="Interpolating missing gps"):
                # interpolate
                try:
                    lat, lon, bearing, elevation = interpolate_lat_lon(
                        geotags, timestamp, max_time_delta)
                except Exception as e:
                    print_error("Error, {}, interpolation of latitude and longitude failed for image {}".format(
                        e, image))
                    continue
                # insert into exif
                exif_edit = ExifEdit(image)
                if lat and lon:
                    exif_edit.add_lat_lon(lat, lon)
                else:
                    print_error(
                        "Error, lat and lon not interpolated for image {}.".format(image))
                if bearing:
                    exif_edit.add_direction(bearing)
                else:
                    if verbose:
                        print(
                            "Warning, bearing not interpolated for image {}.".format(image))
                if elevation:
                    exif_edit.add_altitude(elevation)
                else:
                    if verbose:
                        print(
                            "Warning, altitude not interpolated for image {}.".format(image))

                meta = {}

                add_meta_tag(meta, "booleans", "interpolated_gps", True)

                exif_edit.add_image_history(meta["MAPMetaTags"])

                file_out = image if not keep_original else \
                    image[:-4] + "_processed."
                exif_edit.write(filename=file_out)

        elif data == "identical_timestamps":

            sys.stdout.write("Loading image timestamps.")

            # read timestamps
            timestamps = []
            for image in tqdm(process_file_list, desc="Interpolating identical timestamps"):

                # load exif
                exif = ExifRead(image)
                timestamp = exif.extract_capture_time()
                if timestamp:
                    timestamps.append(timestamp)
                else:
                    print("Capture could not be extracted for image {}.".format(image))

            # interpolate
            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps)

            print("")
            sys.stdout.write("Interpolating identical timestamps.")
            counter = 0

            # write back
            for image, timestamp in tqdm(zip(process_file_list, timestamps_interpolated), desc="Writing capture time in image EXIF"):

                # print progress
                counter += 1
                sys.stdout.write('.')
                if (counter % 100) == 0:
                    print("")

                # load exif
                exif_edit = ExifEdit(image)
                exif_edit.add_date_time_original(timestamp)

                # write to exif
                file_out = image if not keep_original else \
                    image[:-4] + "_processed."
                exif_edit.write(filename=file_out)

            sys.exit()
        else:
            print_error("Error unsupported data for interpolation, exiting...")
            sys.exit(1)
    print("")
Example #12
def post_process(import_path,
                 split_import_path=None,
                 video_import_path=None,
                 summarize=False,
                 move_all_images=False,
                 move_duplicates=False,
                 move_uploaded=False,
                 move_sequences=False,
                 save_as_json=False,
                 list_file_status=False,
                 push_images=False,
                 skip_subfolders=False,
                 verbose=False,
                 save_local_mapping=False):

    # return if nothing specified
    if not any([summarize, move_all_images, list_file_status, push_images, move_duplicates, move_uploaded, save_local_mapping, move_sequences]):
        print("No post processing action specified.")
        return

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)
    if move_all_images:
        move_sequences = True
        move_duplicates = True
        move_uploaded = True
    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)
    if save_local_mapping:
        # NOTE: the boolean flag shadows the module-level save_local_mapping()
        # helper, so the helper must be referenced under another name here.
        local_mapping = save_local_mapping(import_path)
        local_mapping_filepath = os.path.join(os.path.dirname(
            import_path), import_path + "_mapillary_image_uuid_to_local_path_mapping.csv")
        with open(local_mapping_filepath, "w") as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            for row in local_mapping:
                csvwriter.writerow(row)
    if push_images:
        to_be_pushed_files = uploader.get_success_only_manual_upload_file_list(
            import_path, skip_subfolders)
        params = {}
        for image in tqdm(to_be_pushed_files, desc="Pushing images"):
            log_root = uploader.log_rootpath(image)
            upload_params_path = os.path.join(
                log_root, "upload_params_process.json")
            if os.path.isfile(upload_params_path):
                with open(upload_params_path, "rb") as jf:
                    params[image] = json.load(
                        jf, object_hook=uploader.ascii_encode_dict)

        # get the s3 locations of the sequences
        finalize_params = uploader.process_upload_finalization(
            to_be_pushed_files, params)
        uploader.finalize_upload(finalize_params)
        # flag finalization for each file
        uploader.flag_finalization(to_be_pushed_files)

    if any([summarize, list_file_status, move_uploaded]):
        # upload logs
        uploaded_files = uploader.get_success_upload_file_list(
            import_path, skip_subfolders)
        uploaded_files_count = len(uploaded_files)
        failed_upload_files = uploader.get_failed_upload_file_list(
            import_path, skip_subfolders)
        failed_upload_files_count = len(failed_upload_files)
        to_be_finalized_files = uploader.get_finalize_file_list(import_path)
        to_be_finalized_files_count = len(to_be_finalized_files)
        to_be_uploaded_files = uploader.get_upload_file_list(
            import_path, skip_subfolders)
        to_be_uploaded_files_count = len(to_be_uploaded_files)
    if any([summarize, move_sequences]):
        total_files = uploader.get_total_file_list(import_path)
        total_files_count = len(total_files)
    if any([summarize, move_duplicates, list_file_status]):
        duplicates_file_list = processing.get_duplicate_file_list(
            import_path, skip_subfolders)
        duplicates_file_list_count = len(duplicates_file_list)
    if summarize:
        summary_dict = {}
        summary_dict["total images"] = total_files_count
        summary_dict["upload summary"] = {
            "successfully uploaded": uploaded_files_count,
            "failed uploads": failed_upload_files_count,
            "uploaded to be finalized": to_be_finalized_files_count
        }
        # process logs
        summary_dict["process summary"] = {}
        process_steps = ["user_process", "import_meta_process", "geotag_process",
                         "sequence_process", "upload_params_process", "mapillary_image_description"]
        process_status = ["success", "failed"]
        for step in process_steps:

            process_success = len(processing.get_process_status_file_list(
                import_path, step, "success", skip_subfolders))
            process_failed = len(processing.get_process_status_file_list(
                import_path, step, "failed", skip_subfolders))
            summary_dict["process summary"][step] = {
                "failed": process_failed,
                "success": process_success
            }
        summary_dict["process summary"]["duplicates"] = duplicates_file_list_count
        summary_dict["process summary"]["processed_not_yet_uploaded"] = to_be_uploaded_files_count
        print("Import summary for import path {} :".format(import_path))
        print(json.dumps(summary_dict, indent=4))

        ipc.send('summary', summary_dict)

        if save_as_json:
            try:
                processing.save_json(summary_dict, os.path.join(
                    import_path, "mapillary_import_summary.json"))
            except Exception as e:
                print("Could not save summary into json at {}, due to {}".format(
                    os.path.join(import_path, "mapillary_import_summary.json"), e))
    if list_file_status:
        status_list_dict = {}
        status_list_dict["successfully uploaded"] = uploaded_files
        status_list_dict["failed uploads"] = failed_upload_files
        status_list_dict["uploaded to be finalized"] = to_be_finalized_files
        status_list_dict["duplicates"] = duplicates_file_list
        status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files
        print("")
        print("List of file status for import path {} :".format(import_path))
        print(json.dumps(status_list_dict, indent=4))
        if save_as_json:
            try:
                processing.save_json(status_list_dict, os.path.join(
                    import_path, "mapillary_import_image_status_list.json"))
            except Exception as e:
                print("Could not save image status list into json at {}, due to {}".format(
                    os.path.join(import_path, "mapillary_import_image_status_list.json"), e))
    split_import_path = split_import_path if split_import_path else import_path
    if any([move_sequences, move_duplicates, move_uploaded]):
        if not os.path.isdir(split_import_path):
            print("Split import path {} does not exist.".format(
                split_import_path))
            sys.exit(1)

    destination_mapping = {}
    if move_duplicates:
        for image in duplicates_file_list:
            destination_mapping[image] = {"basic": ["duplicates"]}
    if move_uploaded:
        for image in uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("uploaded")
            else:
                destination_mapping[image] = {"basic": ["uploaded"]}
        for image in failed_upload_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("failed_upload")
            else:
                destination_mapping[image] = {"basic": ["failed_upload"]}
        for image in to_be_finalized_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append(
                    "uploaded_not_finalized")
            else:
                destination_mapping[image] = {
                    "basic": ["uploaded_not_finalized"]}
        for image in to_be_uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("to_be_uploaded")
            else:
                destination_mapping[image] = {"basic": ["to_be_uploaded"]}
    if move_sequences:
        destination_mapping = map_images_to_sequences(
            destination_mapping, total_files)
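    # destination_mapping maps each image path to its destination components,
    # e.g. {"basic": ["duplicates", "uploaded"], "sequence": "<sequence dir>"};
    # the final path under split_import_path is assembled from these below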
    for image in destination_mapping:
        basic_destination = destination_mapping[image].get("basic", [])
        sequence_destination = destination_mapping[image].get("sequence", "")
        image_destination_path = os.path.join(*(
            [split_import_path] + basic_destination +
            [os.path.dirname(image[len(os.path.abspath(import_path)) + 1:]),
             sequence_destination, os.path.basename(image)]))
        if not os.path.isdir(os.path.dirname(image_destination_path)):
            os.makedirs(os.path.dirname(image_destination_path))
        os.rename(image, image_destination_path)
        image_logs_dir = uploader.log_rootpath(image)
        destination_logs_dir = uploader.log_rootpath(image_destination_path)
        if not os.path.isdir(image_logs_dir):
            continue
        if not os.path.isdir(os.path.dirname(destination_logs_dir)):
            os.makedirs(os.path.dirname(destination_logs_dir))
        os.rename(image_logs_dir, destination_logs_dir)
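
For illustration, a minimal sketch of how the destination path above is assembled, assuming a hypothetical image flagged as a duplicate and mapped to the sequence folder "seq_1" (all paths are invented):

import os

split_import_path = "/data/split"          # hypothetical
import_path = "/data/import"               # hypothetical
image = "/data/import/ride/IMG_0001.jpg"   # hypothetical
basic_destination = ["duplicates"]         # destination_mapping[image]["basic"]
sequence_destination = "seq_1"             # destination_mapping[image]["sequence"]

relative_dir = os.path.dirname(image[len(os.path.abspath(import_path)) + 1:])
print(os.path.join(*([split_import_path] + basic_destination +
                     [relative_dir, sequence_destination,
                      os.path.basename(image)])))
# /data/split/duplicates/ride/seq_1/IMG_0001.jpg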
Example #18
def interpolation(data,
                  file_in_path=None,
                  file_format="csv",
                  time_column=0,
                  delimiter=",",
                  time_utc=False,
                  time_format="%Y-%m-%dT%H:%M:%SZ",
                  header=False,
                  keep_original=False,
                  import_path=None,
                  max_time_delta=1,
                  verbose=False):

    if not data:
        print("Error, you must specify the data for interpolation.")
        print('Choose between "missing_gps" or "identical_timestamps"')
        sys.exit(1)

    if not import_path and not file_in_path:
        print(
            "Error, you must specify a path to the data, either a directory with images or an external log file."
        )
        sys.exit(1)

    if file_in_path:
        if not os.path.isfile(file_in_path):
            print("Error, specified input file does not exist, exiting...")
            sys.exit(1)
        if file_format != "csv":
            print(
                "Only csv file format is supported at the moment, exiting...")
            sys.exit(1)

        csv_data = process_csv.read_csv(file_in_path,
                                        delimiter=delimiter,
                                        header=header)

        if data == "identical_timestamps":
            timestamps = csv_data[time_column]
            timestamps_datetime = [
                process_csv.format_time(timestamp, time_utc, time_format)
                for timestamp in timestamps
            ]

            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps_datetime)

            csv_data[time_column] = format_datetime(timestamps_interpolated,
                                                    time_utc, time_format)

            if keep_original:
                file_out = file_in_path[:-4] + "_processed." + file_format
            else:
                file_out = file_in_path

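            # csv_data is stored column-wise, so zip(*csv_data) transposes it
            # back into rows for writing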
            with open(file_out, "w") as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=delimiter)
                for row in zip(*csv_data):
                    csvwriter.writerow(row)
            sys.exit()
        elif data == "missing_gps":
            print(
                "Error, missing gps interpolation in an external log file not supported yet, exiting..."
            )
            sys.exit(1)
        else:
            print("Error unsupported data for interpolation, exiting...")
            sys.exit(1)

    if import_path:
        if not os.path.isdir(import_path):
            print("Error, specified import path does not exist, exiting...")
            sys.exit(1)

        # get list of files to process
        process_file_list = uploader.get_total_file_list(import_path)
        if not len(process_file_list):
            print("No images found in the import path " + import_path)
            sys.exit(1)

        if data == "missing_gps":
            # get geotags from images and a list of tuples with images missing geotags
            # and their timestamp
            geotags, missing_geotags = processing.get_images_geotags(
                process_file_list)
            if not len(missing_geotags):
                print("No images in directory {} missing geotags, exiting...".
                      format(import_path))
                sys.exit(1)
            if not len(geotags):
                print("No images in directory {} with geotags.".format(
                    import_path))
                sys.exit(1)

            sys.stdout.write(
                "Interpolating gps for {} images missing geotags.".format(
                    len(missing_geotags)))

            for image, timestamp in tqdm(missing_geotags,
                                         desc="Interpolating missing gps"):
                # interpolate
                try:
                    lat, lon, bearing, elevation = interpolate_lat_lon(
                        geotags, timestamp, max_time_delta)
                except Exception as e:
                    print(
                        "Error, interpolation of latitude and longitude failed for image {}: {}"
                        .format(image, e))
                    continue
                # insert into exif
                exif_edit = ExifEdit(image)
                if lat is not None and lon is not None:
                    exif_edit.add_lat_lon(lat, lon)
                else:
                    print("Error, lat and lon not interpolated for image {}.".
                          format(image))
                # 0.0 is a valid bearing and altitude, so test against None
                # explicitly instead of relying on truthiness
                if bearing is not None:
                    exif_edit.add_direction(bearing)
                elif verbose:
                    print("Warning, bearing not interpolated for image {}.".
                          format(image))
                if elevation is not None:
                    exif_edit.add_altitude(elevation)
                elif verbose:
                    print("Warning, altitude not interpolated for image {}.".
                          format(image))

                meta = {}

                add_meta_tag(meta, "booleans", "interpolated_gps", True)

                exif_edit.add_image_history(meta["MAPMetaTags"])

                # keep the original (3-letter) extension on the processed copy
                file_out = image if not keep_original else image[:-4] + "_processed" + image[-4:]
                exif_edit.write(filename=file_out)

        elif data == "identical_timestamps":

            sys.stdout.write("Loading image timestamps.")

            # read timestamps, keeping only images that actually have one so
            # that images and interpolated timestamps stay aligned
            timestamps = []
            stamped_images = []
            for image in tqdm(process_file_list,
                              desc="Reading image capture times"):

                # load exif
                exif = ExifRead(image)
                timestamp = exif.extract_capture_time()
                if timestamp:
                    timestamps.append(timestamp)
                    stamped_images.append(image)
                else:
                    print(
                        "Capture time could not be extracted for image {}.".
                        format(image))

            # interpolate
            timestamps_interpolated = processing.interpolate_timestamp(
                timestamps)

            print("")
            sys.stdout.write("Interpolating identical timestamps.")
            counter = 0

            # write back
            for image, timestamp in tqdm(
                    zip(stamped_images, timestamps_interpolated),
                    desc="Writing capture time in image EXIF"):

                # print progress
                counter += 1
                sys.stdout.write('.')
                if (counter % 100) == 0:
                    print("")

                # load exif
                exif_edit = ExifEdit(image)
                exif_edit.add_date_time_original(timestamp)

                # write to exif
                # keep the original (3-letter) extension on the processed copy
                file_out = image if not keep_original else image[:-4] + "_processed" + image[-4:]
                exif_edit.write(filename=file_out)

            sys.exit()
        else:
            print("Error unsupported data for interpolation, exiting...")
            sys.exit(1)
    print("")
Example #19
def upload(import_path,
           verbose=False,
           skip_subfolders=False,
           video_file=None,
           number_threads=None,
           max_attempts=None):
    '''
    Upload local images to Mapillary

    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        video_file: Video file, or directory of video files, that the images
            were sampled from; used to derive the sampled frames directory.
        number_threads: Number of upload threads.
        max_attempts: Maximum number of upload attempts per file.

    Returns:
        Nothing. Images are uploaded to Mapillary and flagged locally as
        uploaded.
    '''
    # sanity check if video file is passed
    if video_file and not (os.path.isdir(video_file)
                           or os.path.isfile(video_file)):
        print("Error, video path " + video_file +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_file:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.dirname(video_file), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory {} does not exist, exiting...".format(
            import_path))
        sys.exit(1)

    # get list of files to process
    total_file_list = uploader.get_total_file_list(import_path,
                                                   skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(import_path,
                                                     skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
    else:
        if len(failed_file_list):
            upload_failed = raw_input(
                "Retry uploading previously failed image uploads? [y/n]: ")
            # if yes, add images to the upload list
            if upload_failed in ["y", "Y", "yes", "Yes"]:
                upload_file_list.extend(failed_file_list)

        # verify the images in the upload list, they need to have the image
        # description and certain MAP properties
        upload_file_list = [
            f for f in upload_file_list if verify_mapillary_tag(f)
        ]

        if not len(upload_file_list):
            print("No images to upload.")
            print(
                'Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them'
            )
            sys.exit(1)

        # get upload params for the manual upload images, group them per sequence
        # and separate direct upload images
        params = {}
        list_per_sequence_mapping = {}
        direct_upload_file_list = []
        for image in upload_file_list:
            log_root = uploader.log_rootpath(image)
            upload_params_path = os.path.join(log_root,
                                              "upload_params_process.json")
            if os.path.isfile(upload_params_path):
                with open(upload_params_path, "rb") as jf:
                    params[image] = json.load(
                        jf, object_hook=uploader.ascii_encode_dict)
                    sequence = params[image]["key"]
                    if sequence in list_per_sequence_mapping:
                        list_per_sequence_mapping[sequence].append(image)
                    else:
                        list_per_sequence_mapping[sequence] = [image]
            else:
                direct_upload_file_list.append(image)

        # inform how many images are to be uploaded and how many are being skipped
        # from upload

        print("Uploading {} images with valid mapillary tags (Skipping {})".
              format(len(upload_file_list),
                     len(total_file_list) - len(upload_file_list)))

        if len(direct_upload_file_list):
            uploader.upload_file_list_direct(direct_upload_file_list,
                                             number_threads, max_attempts)
        for idx, sequence in enumerate(list_per_sequence_mapping):
            uploader.upload_file_list_manual(
                list_per_sequence_mapping[sequence], params, idx,
                number_threads, max_attempts)

        uploader.print_summary(upload_file_list)
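
A minimal usage sketch, assuming the images were already processed with mapillary_tools; the path and upload settings are hypothetical:

# upload processed images, retrying each file up to 10 times on 4 threads
upload("/data/images", number_threads=4, max_attempts=10)

# or upload frames sampled from a video; when no import path is given it is
# derived from the video location
upload(None, video_file="/data/rides/ride.mp4")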
Example #20
def process_csv(import_path,
                csv_path,
                filename_column=None,
                timestamp_column=None,
                latitude_column=None,
                longitude_column=None,
                heading_column=None,
                altitude_column=None,
                gps_week_column=None,
                time_format="%Y:%m:%d %H:%M:%S.%f",
                convert_gps_time=False,
                convert_utc_time=False,
                delimiter=",",
                header=False,
                meta_columns=None,
                meta_names=None,
                meta_types=None,
                verbose=False,
                keep_original=False):

    # sanity checks
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory {} does not exist, exiting...".format(
            import_path))
        sys.exit(1)

    if not csv_path or not os.path.isfile(csv_path):
        print("Error, csv file not provided or does not exist. Please specify a valid path to a csv file.")
        sys.exit(1)

    # get list of files to process
    process_file_list = uploader.get_total_file_list(import_path)
    if not len(process_file_list):
        print("No images found in the import path " + import_path)
        sys.exit(1)

    if gps_week_column is not None and not convert_gps_time:
        print("Error, in order to parse timestamps provided as a combination of GPS week and GPS seconds, you must specify the timestamp column and the --convert_gps_time flag, exiting...")
        sys.exit(1)

    if (convert_gps_time or convert_utc_time) and timestamp_column is None:
        print("Error, if specifying a flag to convert timestamps, the timestamp column must be provided, exiting...")
        sys.exit(1)

    column_indexes = [filename_column, timestamp_column,
                      latitude_column, longitude_column, heading_column, altitude_column, gps_week_column]

    if any([column == 0 for column in column_indexes]):
        print("Error, csv column numbers start with 1, one of the columns specified is 0.")
        sys.exit(1)

    column_indexes = [
        index - 1 if index else None for index in column_indexes]

    # checks for meta arguments if any
    meta_columns, meta_names, meta_types = validate_meta_data(
        meta_columns, meta_names, meta_types)

    # open and process csv
    csv_data = read_csv(csv_path,
                        delimiter=delimiter,
                        header=header)

    # align by filename column if provided, otherwise align in order of image
    # names
    file_names = None
    if filename_column:
        file_names = csv_data[filename_column - 1]
    else:
        if verbose:
            print("Warning, filename column not provided, images will be aligned with the csv data in order of the image filenames.")

    # process each image
    for idx, image in tqdm(enumerate(process_file_list), desc="Inserting csv data in image EXIF"):

        # get image entry index
        image_index = get_image_index(image, file_names) if file_names else idx
        if image_index is None:
            print("Warning, no entry found in csv file for image " + image)
            continue

        # get required data
        timestamp, lat, lon, heading, altitude = parse_csv_geotag_data(
            csv_data, image_index, column_indexes, convert_gps_time, convert_utc_time, time_format)

        # get meta data
        meta = parse_csv_meta_data(
            csv_data, image_index, meta_columns, meta_types, meta_names)

        # insert in image EXIF
        exif_edit = ExifEdit(image)
        if timestamp:
            exif_edit.add_date_time_original(timestamp)
        # 0.0 is a valid coordinate, heading and altitude, so test against
        # None explicitly instead of relying on truthiness
        if lat is not None and lon is not None:
            exif_edit.add_lat_lon(lat, lon)
        if heading is not None:
            exif_edit.add_direction(heading)
        if altitude is not None:
            exif_edit.add_altitude(altitude)
        if meta:
            exif_edit.add_image_history(meta["MAPMetaTags"])

        filename = image
        filename_keep_original = processing.processed_images_rootpath(image)

        if os.path.isfile(filename_keep_original):
            os.remove(filename_keep_original)

        if keep_original:
            if not os.path.isdir(os.path.dirname(filename_keep_original)):
                os.makedirs(os.path.dirname(filename_keep_original))
            filename = filename_keep_original

        try:
            exif_edit.write(filename=filename)
        except:
            print("Error, image EXIF could not be written back for image " + image)
            return None
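
A minimal usage sketch, assuming a csv log with a header row and the filename, timestamp, latitude and longitude in columns 1 through 4 (column numbers are 1-based, as checked above); the paths are hypothetical:

process_csv("/data/images", "/data/log.csv",
            filename_column=1, timestamp_column=2,
            latitude_column=3, longitude_column=4,
            header=True)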