def load_geotag_points(process_file_list, verbose=False):
    """Collect geotag information for each image in process_file_list.

    Reads the per-image geotag log via get_geotag_data; images without
    geotag data are logged as failed for "sequence_process" and skipped.
    Stale "duplicate" flags from a previous run are removed for images
    that do have geotag data.

    :param process_file_list: iterable of image file paths to load
    :param verbose: forwarded to get_geotag_data / create_and_log_process
    :return: tuple (file_list, capture_times, lats, lons, directions) of
             parallel lists for the successfully loaded images
    """
    file_list = []
    capture_times = []
    lats = []
    lons = []
    directions = []

    for image in tqdm(process_file_list, desc="Loading geotag points"):
        log_root = uploader.log_rootpath(image)
        geotag_data = get_geotag_data(log_root,
                                      image,
                                      verbose)
        if not geotag_data:
            create_and_log_process(image,
                                   "sequence_process",
                                   "failed",
                                   verbose=verbose)
            continue
        # assume all data needed available from this point on
        file_list.append(image)
        capture_times.append(datetime.datetime.strptime(
            geotag_data["MAPCaptureTime"], '%Y_%m_%d_%H_%M_%S_%f'))
        lats.append(geotag_data["MAPLatitude"])
        lons.append(geotag_data["MAPLongitude"])
        # the original used a conditional expression purely for its
        # side effect; a plain if/else is the idiomatic form.
        # Default to 0.0 heading when no compass data was recorded.
        if "MAPCompassHeading" in geotag_data:
            directions.append(geotag_data["MAPCompassHeading"]["TrueHeading"])
        else:
            directions.append(0.0)

        # remove previously created duplicate flags
        duplicate_flag_path = os.path.join(log_root, "duplicate")
        if os.path.isfile(duplicate_flag_path):
            os.remove(duplicate_flag_path)

    return file_list, capture_times, lats, lons, directions
def preform_process(file_path, process, rerun=False):
    """Return True when `process` should run for `file_path`.

    A process runs only when the file has not been uploaded yet, and
    either its success flag is missing or a rerun was requested.
    """
    root = uploader.log_rootpath(file_path)
    already_uploaded = os.path.isfile(os.path.join(root, "upload_success"))
    already_succeeded = os.path.isfile(os.path.join(root, process + "_success"))
    return (not already_uploaded) and (rerun or not already_succeeded)
Beispiel #3
0
def save_local_mapping(import_path):
    """Build a (relative_path, image_uuid) mapping for images under import_path.

    The uuid is read from the per-image mapillary_image_description.json
    log when present, otherwise from the image EXIF description.  Images
    whose uuid cannot be determined are reported and skipped.  Note the
    function only builds and returns the mapping; it does not write it.

    :param import_path: root directory of the import
    :return: list of (relative_path, image_file_uuid) tuples
    """
    total_files = uploader.get_total_file_list(import_path)

    # hoisted out of the loop; also used for prefix stripping below
    abs_root = os.path.abspath(import_path)

    local_mapping = []
    for file in tqdm(total_files, desc="Reading image uuids"):
        image_file_uuid = None
        # BUG FIX: str.lstrip(prefix) strips a *character set*, not a
        # prefix, and could eat leading characters of the relative path.
        # Strip the prefix (and any leading separator) explicitly.
        if file.startswith(abs_root):
            relative_path = file[len(abs_root):].lstrip(os.sep)
        else:
            relative_path = file
        log_rootpath = uploader.log_rootpath(file)
        image_description_json_path = os.path.join(
            log_rootpath, "mapillary_image_description.json")
        if os.path.isfile(image_description_json_path):
            image_description_json = processing.load_json(
                image_description_json_path)
            if "MAPPhotoUUID" in image_description_json:
                image_file_uuid = image_description_json["MAPPhotoUUID"]
            else:
                print(
                    "Error, photo uuid not in mapillary_image_description.json log file.")
        else:
            # no processing log yet: fall back to the EXIF description
            image_exif = exif_read.ExifRead(file)
            image_description = json.loads(
                image_exif.extract_image_description())
            if "MAPPhotoUUID" in image_description:
                image_file_uuid = str(image_description["MAPPhotoUUID"])
            else:
                print("Warning, image {} EXIF does not contain mapillary image description and mapillary_image_description.json log file does not exist. Try to process the image using mapillary_tools.".format(file))
        if image_file_uuid:
            local_mapping.append((relative_path, image_file_uuid))
    return local_mapping
def map_images_to_sequences(destination_mapping, total_files):
    """Annotate destination_mapping with a per-sequence counter.

    Reads each image's sequence_process.json log to find its
    MAPSequenceUUID and assigns a 1-based counter (stored as a string)
    per distinct uuid under the "sequence" key.  Images whose uuid
    cannot be read are reported and left untouched.
    """
    seen_uuids = []
    counter = 0
    for image in tqdm(total_files, desc="Reading sequence information stored in log files"):
        log_root = uploader.log_rootpath(image)
        seq_log = os.path.join(log_root, "sequence_process.json")
        uuid = ""
        if os.path.isfile(seq_log):
            data = processing.load_json(seq_log)
            if data and "MAPSequenceUUID" in data:
                uuid = data["MAPSequenceUUID"]
        if not uuid:
            print("MAPSequenceUUID could not be read for image {}".format(image))
            continue
        # first time we see this uuid, give it the next counter value
        if uuid not in seen_uuids:
            counter += 1
            seen_uuids.append(uuid)
        if image in destination_mapping:
            destination_mapping[image]["sequence"] = str(counter)
        else:
            destination_mapping[image] = {"sequence": str(counter)}
    return destination_mapping
def preform_process(file_path, process, rerun=False):
    """Decide whether `process` needs to run for `file_path`.

    Files already uploaded are never reprocessed; otherwise the step
    runs when its success flag is missing or a rerun was requested.
    """
    log_dir = uploader.log_rootpath(file_path)
    if os.path.isfile(os.path.join(log_dir, "upload_success")):
        return False
    return rerun or not os.path.isfile(
        os.path.join(log_dir, process + "_success"))
def load_geotag_points(process_file_list, verbose=False):
    """Collect geotag information for each image in process_file_list.

    Images without geotag data are logged as failed for
    "sequence_process" and skipped; stale "duplicate" flags are removed
    for images that load successfully.

    :param process_file_list: iterable of image file paths to load
    :param verbose: forwarded to get_geotag_data / create_and_log_process
    :return: tuple (file_list, capture_times, lats, lons, directions) of
             parallel lists for the successfully loaded images
    """
    file_list = []
    capture_times = []
    lats = []
    lons = []
    directions = []

    for image in tqdm(process_file_list, desc="Loading geotag points"):
        log_root = uploader.log_rootpath(image)
        geotag_data = get_geotag_data(log_root,
                                      image,
                                      verbose)
        if not geotag_data:
            create_and_log_process(image,
                                   "sequence_process",
                                   "failed",
                                   verbose=verbose)
            continue
        # assume all data needed available from this point on
        file_list.append(image)
        capture_times.append(datetime.datetime.strptime(
            geotag_data["MAPCaptureTime"], '%Y_%m_%d_%H_%M_%S_%f'))
        lats.append(geotag_data["MAPLatitude"])
        lons.append(geotag_data["MAPLongitude"])
        # fixed idiom: the original abused a conditional expression for
        # its append side effect.  Heading defaults to 0.0 when missing.
        if "MAPCompassHeading" in geotag_data:
            directions.append(geotag_data["MAPCompassHeading"]["TrueHeading"])
        else:
            directions.append(0.0)

        # remove previously created duplicate flags
        duplicate_flag_path = os.path.join(log_root, "duplicate")
        if os.path.isfile(duplicate_flag_path):
            os.remove(duplicate_flag_path)

    return file_list, capture_times, lats, lons, directions
Beispiel #7
0
def map_images_to_sequences(destination_mapping, total_files):
    """Fill in a per-sequence counter for every image in total_files.

    Each image's MAPSequenceUUID is read from its sequence_process.json
    log; distinct uuids get consecutive 1-based counters which are
    stored (as strings) under destination_mapping[image]["sequence"].
    Images with no readable uuid are reported and skipped.
    """
    uuid_order = []
    next_counter = 0
    for image in tqdm(total_files,
                      desc="Reading sequence information stored in log files"):
        seq_path = os.path.join(uploader.log_rootpath(image),
                                "sequence_process.json")
        seq_json = processing.load_json(seq_path) if os.path.isfile(seq_path) else None
        uuid = seq_json["MAPSequenceUUID"] if (
            seq_json and "MAPSequenceUUID" in seq_json) else ""
        if not uuid:
            print(
                "MAPSequenceUUID could not be read for image {}".format(image))
            continue
        if uuid not in uuid_order:
            next_counter += 1
            uuid_order.append(uuid)
        entry = destination_mapping.setdefault(image, {})
        entry["sequence"] = str(next_counter)
    return destination_mapping
def save_local_mapping(import_path):
    """Build a (relative_path, image_uuid) mapping for images under import_path.

    Prefers the per-image mapillary_image_description.json log, falling
    back to the EXIF image description.  Images whose uuid cannot be
    determined are reported and skipped.  The mapping is returned, not
    written to disk.

    :param import_path: root directory of the import
    :return: list of (relative_path, image_file_uuid) tuples
    """
    total_files = uploader.get_total_file_list(import_path)

    # hoisted out of the loop; also used for prefix stripping below
    abs_root = os.path.abspath(import_path)

    local_mapping = []
    for file in tqdm(total_files, desc="Reading image uuids"):
        image_file_uuid = None
        # BUG FIX: str.lstrip(prefix) strips a *character set*, not a
        # prefix, and could eat leading characters of the relative path.
        if file.startswith(abs_root):
            relative_path = file[len(abs_root):].lstrip(os.sep)
        else:
            relative_path = file
        log_rootpath = uploader.log_rootpath(file)
        image_description_json_path = os.path.join(
            log_rootpath, "mapillary_image_description.json")
        if os.path.isfile(image_description_json_path):
            image_description_json = processing.load_json(
                image_description_json_path)
            if "MAPPhotoUUID" in image_description_json:
                image_file_uuid = image_description_json["MAPPhotoUUID"]
            else:
                print(
                    "Error, photo uuid not in mapillary_image_description.json log file.")
        else:
            # no processing log yet: fall back to the EXIF description
            image_exif = exif_read.ExifRead(file)
            image_description = json.loads(
                image_exif.extract_image_description())
            if "MAPPhotoUUID" in image_description:
                image_file_uuid = str(image_description["MAPPhotoUUID"])
            else:
                print("Warning, image {} EXIF does not contain mapillary image description and mapillary_image_description.json log file does not exist. Try to process the image using mapillary_tools.".format(file))
        if image_file_uuid:
            local_mapping.append((relative_path, image_file_uuid))
    return local_mapping
Beispiel #9
0
def create_and_log_process(image,
                           process,
                           status,
                           mapillary_description=None,
                           verbose=False):
    """Record the outcome of `process` for `image` in its log directory.

    Writes/clears the "<process>_success" / "<process>_failed" flag
    files plus a timestamped copy of each, stores mapillary_description
    in "<process>.json" on success, and notifies ipc listeners.  A
    "success" status with an empty description is demoted to "failed".

    :param image: path of the image being processed
    :param process: name of the processing step
    :param status: "success" or "failed"
    :param mapillary_description: dict saved to the process json log
    :param verbose: print extra warnings
    """
    # avoid the shared-mutable-default pitfall (was `mapillary_description={}`)
    if mapillary_description is None:
        mapillary_description = {}
    # set log path
    log_root = uploader.log_rootpath(image)
    # make all the dirs if not there
    if not os.path.isdir(log_root):
        os.makedirs(log_root)
    # set the log flags for process
    log_process = os.path.join(log_root, process)
    log_process_succes = log_process + "_success"
    log_process_failed = log_process + "_failed"
    log_MAPJson = os.path.join(log_root, process + ".json")

    if status == "success" and not mapillary_description:
        status = "failed"
    elif status == "success":
        try:
            save_json(mapillary_description, log_MAPJson)
            open(log_process_succes, "w").close()
            open(
                log_process_succes + "_" +
                str(time.strftime("%Y_%m_%d_%H_%M_%S", time.gmtime())),
                "w").close()
            # if there is a failed log from before, remove it
            if os.path.isfile(log_process_failed):
                os.remove(log_process_failed)
        except Exception:
            # narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are not swallowed; if the image
            # description could not be written to the filesystem,
            # log failed
            print_error("Error, " + process + " logging failed for image " +
                        image)
            status = "failed"

    if status == "failed":
        open(log_process_failed, "w").close()
        open(
            log_process_failed + "_" +
            str(time.strftime("%Y_%m_%d_%H_%M_%S", time.gmtime())),
            "w").close()
        # if there is a success log from before, remove it
        if os.path.isfile(log_process_succes):
            os.remove(log_process_succes)
        # if there is meta data from before, remove it
        if os.path.isfile(log_MAPJson):
            if verbose:
                print(
                    "Warning, {} in this run has failed, previously generated properties will be removed."
                    .format(process))
            os.remove(log_MAPJson)

    ipc.send(process, {
        'image': image,
        'status': status,
        'description': mapillary_description
    })
Beispiel #10
0
def video_import_paths(video_file):
    """Return the sample paths recorded for video_file, or [] if none.

    Reads "sample_paths" from the video_process.json log; an absent log
    directory, log file, or key yields an empty list.
    """
    log_root = uploader.log_rootpath(video_file)
    log_process = os.path.join(log_root, "video_process.json")
    if os.path.isdir(log_root) and os.path.isfile(log_process):
        return load_json(log_process).get("sample_paths", [])
    return []
Beispiel #11
0
def video_import_paths(video_file):
    """Return the list of sample paths logged for video_file.

    Falls back to an empty list when the log directory, the
    video_process.json file, or the "sample_paths" key is missing.
    """
    root = uploader.log_rootpath(video_file)
    if not os.path.isdir(root):
        return []
    process_log = os.path.join(root, "video_process.json")
    if not os.path.isfile(process_log):
        return []
    contents = load_json(process_log)
    return contents.get("sample_paths", [])
Beispiel #12
0
def create_and_log_video_process(video_file, import_path):
    """Record `import_path` as a sample path in the video's process log.

    Creates the log directory if needed and appends import_path to the
    "sample_paths" list in video_process.json; does nothing when the
    path is already recorded.
    """
    root = uploader.log_rootpath(video_file)
    if not os.path.isdir(root):
        os.makedirs(root)
    # current list of sample paths; bail out if already recorded
    known_paths = video_import_paths(video_file)
    if import_path in known_paths:
        return
    known_paths.append(import_path)
    process_log = os.path.join(root, "video_process.json")
    log_contents = load_json(process_log)
    log_contents.update({"sample_paths": known_paths})
    save_json(log_contents, process_log)
Beispiel #13
0
def create_and_log_video_process(video_file, import_path):
    """Append `import_path` to the "sample_paths" of video_file's log.

    The log directory is created on demand; a path that was already
    recorded is left as-is and no write occurs.
    """
    log_dir = uploader.log_rootpath(video_file)
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
    existing = video_import_paths(video_file)
    if import_path in existing:
        # nothing new to record
        return
    existing.append(import_path)
    log_file = os.path.join(log_dir, "video_process.json")
    data = load_json(log_file)
    data.update({"sample_paths": existing})
    save_json(data, log_file)
Beispiel #14
0
def create_and_log_process(image, process, status, mapillary_description=None, verbose=False):
    """Record the outcome of `process` for `image` in its log directory.

    Writes/clears the "<process>_success" / "<process>_failed" flag
    files plus a timestamped copy, stores mapillary_description in
    "<process>.json" on success, and notifies ipc listeners with the
    decoded image path.  A "success" status with an empty description
    is demoted to "failed".

    :param image: path of the image being processed
    :param process: name of the processing step
    :param status: "success" or "failed"
    :param mapillary_description: dict saved to the process json log
    :param verbose: print extra warnings
    """
    # avoid the shared-mutable-default pitfall (was `mapillary_description={}`)
    if mapillary_description is None:
        mapillary_description = {}
    # set log path
    log_root = uploader.log_rootpath(image)
    # make all the dirs if not there
    if not os.path.isdir(log_root):
        os.makedirs(log_root)
    # set the log flags for process
    log_process = os.path.join(
        log_root, process)
    log_process_succes = log_process + "_success"
    log_process_failed = log_process + "_failed"
    log_MAPJson = os.path.join(log_root, process + ".json")

    if status == "success" and not mapillary_description:
        status = "failed"
    elif status == "success":
        try:
            save_json(mapillary_description, log_MAPJson)
            open(log_process_succes, "w").close()
            open(log_process_succes + "_" +
                 str(time.strftime("%Y_%m_%d_%H_%M_%S", time.gmtime())), "w").close()
            # if there is a failed log from before, remove it
            if os.path.isfile(log_process_failed):
                os.remove(log_process_failed)
        except Exception:
            # narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are not swallowed; if the image
            # description could not be written to the filesystem,
            # log failed
            print_error("Error, " + process +
                        " logging failed for image " + image)
            status = "failed"

    if status == "failed":
        open(log_process_failed, "w").close()
        open(log_process_failed + "_" +
             str(time.strftime("%Y_%m_%d_%H_%M_%S", time.gmtime())), "w").close()
        # if there is a success log from before, remove it
        if os.path.isfile(log_process_succes):
            os.remove(log_process_succes)
        # if there is meta data from before, remove it
        if os.path.isfile(log_MAPJson):
            if verbose:
                print("Warning, {} in this run has failed, previously generated properties will be removed.".format(
                    process))
            os.remove(log_MAPJson)

    decoded_image = force_decode(image)

    ipc.send(
        process,
        {'image': decoded_image, 'status': status, 'description': mapillary_description})
def video_upload(video_file, import_path, verbose=False):
    """Prepare `import_path` for sampling `video_file`.

    Creates import_path if needed, warns when the video was already
    sampled there, and records it as a sample path (in memory only).

    :return: 1 if any known sample path already contains successfully
             uploaded frames (resampling could misalign), else 0
    """
    # removed unused local `log_root` (uploader.log_rootpath result was
    # never read)
    import_paths = video_import_paths(video_file)
    if not os.path.isdir(import_path):
        os.makedirs(import_path)
    if import_path not in import_paths:
        import_paths.append(import_path)
    else:
        print("Warning, {} has already been sampled into {}, please make sure all the previously sampled frames are deleted, otherwise the alignment might be incorrect".format(video_file, import_path))
    for video_import_path in import_paths:
        if os.path.isdir(video_import_path):
            if len(uploader.get_success_upload_file_list(video_import_path)):
                if verbose:
                    print("no")
                return 1
    return 0
Beispiel #16
0
def video_upload(video_file, import_path, verbose=False):
    """Prepare `import_path` for sampling `video_file`.

    Creates import_path if needed, warns when the video was already
    sampled there, and records it as a sample path (in memory only).

    :return: 1 if any known sample path already contains successfully
             uploaded frames (resampling could misalign), else 0
    """
    # removed unused local `log_root` (uploader.log_rootpath result was
    # never read)
    import_paths = video_import_paths(video_file)
    if not os.path.isdir(import_path):
        os.makedirs(import_path)
    if import_path not in import_paths:
        import_paths.append(import_path)
    else:
        print("Warning, {} has already been sampled into {}, please make sure all the previously sampled frames are deleted, otherwise the alignment might be incorrect".format(video_file, import_path))
    for video_import_path in import_paths:
        if os.path.isdir(video_import_path):
            if len(uploader.get_success_upload_file_list(video_import_path)):
                if verbose:
                    print("no")
                return 1
    return 0
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False):
    """Finalize processing: build and log the full mapillary image description.

    For every image that still needs the "mapillary_image_description"
    step (and is not flagged as a duplicate), assembles the final image
    description and logs the step as successful.

    :param import_path: root directory of the import
    :param master_upload: forwarded to get_final_mapillary_image_description
    :param verbose: print extra warnings and errors
    :param rerun: reprocess images already processed but not yet uploaded
    :param skip_subfolders: ignore images in subdirectories
    :param skip_EXIF_insert: do not write the description into the EXIF
    """
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        # message fixed: was the double negative "doesnt not exist"
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(import_path,
                                                         "mapillary_image_description",
                                                         rerun,
                                                         verbose,
                                                         skip_subfolders)
    if not len(process_file_list):
        print("No images to run process finalization")
        print("If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")

    for image in process_file_list:
        # check the processing logs
        log_root = uploader.log_rootpath(import_path,
                                         image)

        duplicate_path = os.path.join(log_root,
                                      "duplicate")

        # images flagged as duplicates are excluded from finalization
        if os.path.isfile(duplicate_path):
            continue

        final_mapillary_image_description = processing.get_final_mapillary_image_description(log_root,
                                                                                             image,
                                                                                             master_upload,
                                                                                             verbose,
                                                                                             skip_EXIF_insert)

        processing.create_and_log_process(image,
                                          import_path,
                                          "mapillary_image_description",
                                          "success",
                                          final_mapillary_image_description,
                                          verbose=verbose)
Beispiel #18
0
def load_geotag_points(process_file_list, verbose=False):
    """Collect geotag information for each image in process_file_list.

    Prints a lightweight dot-based progress indicator when verbose.
    Images with no geotag data are logged as failed for
    "sequence_process" and skipped; stale "duplicate" flags are removed
    for images that load successfully.

    :param process_file_list: iterable of image file paths to load
    :param verbose: print progress and forward to logging helpers
    :return: tuple (file_list, capture_times, lats, lons, directions) of
             parallel lists for the successfully loaded images
    """
    file_list = []
    capture_times = []
    lats = []
    lons = []
    directions = []

    if verbose:
        sys.stdout.write("Loading geotag points...")
    progress_count = 0
    for image in process_file_list:
        progress_count += 1
        if verbose:
            # one dot per 50 images, line break every 5000
            if (progress_count % 50) == 0:
                sys.stdout.write(".")
            if (progress_count % 5000) == 0:
                print("")
                # check the status of the geotagging
        log_root = uploader.log_rootpath(image)
        geotag_data = get_geotag_data(log_root, image, verbose)
        if not geotag_data:
            create_and_log_process(image,
                                   "sequence_process",
                                   "failed",
                                   verbose=verbose)
            continue
        # assume all data needed available from this point on
        file_list.append(image)
        capture_times.append(
            datetime.datetime.strptime(geotag_data["MAPCaptureTime"],
                                       '%Y_%m_%d_%H_%M_%S_%f'))
        lats.append(geotag_data["MAPLatitude"])
        lons.append(geotag_data["MAPLongitude"])
        # fixed idiom: the original abused a conditional expression for
        # its append side effect.  Heading defaults to 0.0 when missing.
        if "MAPCompassHeading" in geotag_data:
            directions.append(geotag_data["MAPCompassHeading"]["TrueHeading"])
        else:
            directions.append(0.0)

        # remove previously created duplicate flags
        duplicate_flag_path = os.path.join(log_root, "duplicate")
        if os.path.isfile(duplicate_flag_path):
            os.remove(duplicate_flag_path)

    return file_list, capture_times, lats, lons, directions
Beispiel #19
0
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False,
                   keep_original=False):
    """Finalize processing: build and log the full mapillary image description.

    For every image that still needs the "mapillary_image_description"
    step (and is not flagged as a duplicate), assembles the final image
    description and logs the step as successful.

    :param import_path: root directory of the import
    :param master_upload: forwarded to get_final_mapillary_image_description
    :param verbose: print extra warnings and errors
    :param rerun: reprocess images already processed but not yet uploaded
    :param skip_subfolders: ignore images in subdirectories
    :param skip_EXIF_insert: do not write the description into the EXIF
    :param keep_original: forwarded to get_final_mapillary_image_description
    """
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        # message fixed: was the double negative "doesnt not exist"
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "mapillary_image_description", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run process finalization")
        print(
            "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun"
        )

    for image in process_file_list:
        # check the processing logs
        log_root = uploader.log_rootpath(image)

        duplicate_path = os.path.join(log_root, "duplicate")

        # images flagged as duplicates are excluded from finalization
        if os.path.isfile(duplicate_path):
            continue

        final_mapillary_image_description = processing.get_final_mapillary_image_description(
            log_root, image, master_upload, verbose, skip_EXIF_insert,
            keep_original)

        processing.create_and_log_process(image,
                                          "mapillary_image_description",
                                          "success",
                                          final_mapillary_image_description,
                                          verbose=verbose)
Beispiel #20
0
def video_upload(video_file, import_path, verbose=False):
    """Prepare `import_path` for (re)sampling `video_file`.

    Deletes previously sampled frames in import_path, recreates the
    directory, and records it as a sample path (in memory only).

    :return: 1 if any known sample path already holds successfully
             uploaded frames (resampling could misalign), else 0
    """
    # removed unused local `log_root` (uploader.log_rootpath result was
    # never read)
    import_paths = video_import_paths(video_file)
    if os.path.isdir(import_path):
        print(
            "Warning, {} has already been sampled into {}, previously sampled frames will be deleted"
            .format(video_file, import_path))
        shutil.rmtree(import_path)
    if not os.path.isdir(import_path):
        os.makedirs(import_path)
    if import_path not in import_paths:
        import_paths.append(import_path)
    for video_import_path in import_paths:
        if os.path.isdir(video_import_path):
            if len(uploader.get_success_upload_file_list(video_import_path)):
                if verbose:
                    print("no")
                return 1
    return 0
Beispiel #21
0
def video_upload(video_file, import_path, verbose=False):
    """Prepare `import_path` for (re)sampling `video_file`.

    Deletes previously sampled frames in import_path (warning only when
    verbose), recreates the directory, and records it as a sample path
    (in memory only).

    :return: 1 if any known sample path already holds successfully
             uploaded frames (resampling could misalign), else 0
    """
    # removed unused locals `root_path`/`log_root` (the
    # uploader.log_rootpath result was never read)
    import_paths = video_import_paths(video_file)
    if os.path.isdir(import_path):
        if verbose:
            print("Warning, {} has already been sampled into {}, previously sampled frames will be deleted".format(
                video_file, import_path))
        shutil.rmtree(import_path)
    if not os.path.isdir(import_path):
        os.makedirs(import_path)
    if import_path not in import_paths:
        import_paths.append(import_path)
    for video_import_path in import_paths:
        if os.path.isdir(video_import_path):
            if len(uploader.get_success_upload_file_list(video_import_path)):
                if verbose:
                    print("no")
                return 1
    return 0
def is_duplicate(file_path):
    """Return True when a "duplicate" flag file exists in the image's log dir."""
    flag = os.path.join(uploader.log_rootpath(file_path), "duplicate")
    return os.path.isfile(flag)
def process_status(file_path, process, status):
    """Return True when the `<process>_<status>` flag file exists for file_path."""
    flag_name = process + "_" + status
    flag_path = os.path.join(uploader.log_rootpath(file_path), flag_name)
    return os.path.isfile(flag_path)
Beispiel #24
0
def upload(import_path,
           verbose=False,
           skip_subfolders=False,
           video_file=None,
           number_threads=None,
           max_attempts=None):
    '''
    Upload local images to Mapillary.

    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        video_file: Optional video file or directory; when given, images
            are taken from the "mapillary_sampled_video_frames"
            sampling directory instead of import_path directly.
        number_threads: Forwarded to the uploader functions.
        max_attempts: Forwarded to the uploader functions.

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.
        Exits the process (code 1) on a bad path or when nothing is
        uploadable.
    '''
    # sanity check if video file is passed
    if video_file and not (os.path.isdir(video_file)
                           or os.path.isfile(video_file)):
        print("Error, video path " + video_file +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_file:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        # sampled frames live under import_path when one was given,
        # otherwise next to the video file itself
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.dirname(video_file), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process; the four lists partition/overlap by
    # per-image upload status flags kept in the log directories
    total_file_list = uploader.get_total_file_list(import_path,
                                                   skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(import_path,
                                                     skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
    else:
        if len(failed_file_list):
            # interactive prompt (raw_input: this code targets Python 2)
            upload_failed = raw_input(
                "Retry uploading previously failed image uploads? [y/n]: ")
            # if yes, add images to the upload list
            if upload_failed in ["y", "Y", "yes", "Yes"]:
                upload_file_list.extend(failed_file_list)

        # verify the images in the upload list, they need to have the image
        # description and certain MAP properties
        upload_file_list = [
            f for f in upload_file_list if verify_mapillary_tag(f)
        ]

        if not len(upload_file_list):
            print("No images to upload.")
            print(
                'Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them'
            )
            sys.exit(1)

        # get upload params for the manual upload images, group them per sequence
        # and separate direct upload images
        params = {}
        list_per_sequence_mapping = {}
        direct_upload_file_list = []
        for image in upload_file_list:
            log_root = uploader.log_rootpath(image)
            upload_params_path = os.path.join(log_root,
                                              "upload_params_process.json")
            if os.path.isfile(upload_params_path):
                with open(upload_params_path, "rb") as jf:
                    # params[image]["key"] is used below as the sequence
                    # identifier for grouping manual uploads
                    params[image] = json.load(
                        jf, object_hook=uploader.ascii_encode_dict)
                    sequence = params[image]["key"]
                    if sequence in list_per_sequence_mapping:
                        list_per_sequence_mapping[sequence].append(image)
                    else:
                        list_per_sequence_mapping[sequence] = [image]
            else:
                # no upload params log: this image goes the direct route
                direct_upload_file_list.append(image)

        # inform how many images are to be uploaded and how many are being skipped
        # from upload

        print("Uploading {} images with valid mapillary tags (Skipping {})".
              format(len(upload_file_list),
                     len(total_file_list) - len(upload_file_list)))

        if len(direct_upload_file_list):
            uploader.upload_file_list_direct(direct_upload_file_list,
                                             number_threads, max_attempts)
        for idx, sequence in enumerate(list_per_sequence_mapping):
            uploader.upload_file_list_manual(
                list_per_sequence_mapping[sequence], params, idx,
                number_threads, max_attempts)

        uploader.print_summary(upload_file_list)
def process_sequence_properties(import_path,
                                cutoff_distance=600.0,
                                cutoff_time=60.0,
                                interpolate_directions=False,
                                flag_duplicates=False,
                                duplicate_distance=0.1,
                                duplicate_angle=5,
                                offset_angle=0.0,
                                verbose=False,
                                rerun=False,
                                skip_subfolders=False):
    """Split geotagged images into sequences and finalize each sequence.

    Loads capture time / GPS / direction data for every image that still
    needs sequence processing, splits the images into sequences on the
    given time and distance cutoffs, optionally flags near-duplicate
    frames, and finalizes each sequence in chunks of at most
    MAX_SEQUENCE_LENGTH images.

    :param import_path: root directory of the images to process
    :param cutoff_distance: GPS gap (meters) that starts a new sequence
    :param cutoff_time: time gap (seconds) that starts a new sequence
    :param interpolate_directions: if True, always replace stored compass
        headings with bearings computed from consecutive GPS points
    :param flag_duplicates: if True, flag images that are within both
        duplicate_distance and duplicate_angle of the previous kept image
    :param duplicate_distance: duplicate distance threshold (meters)
    :param duplicate_angle: duplicate bearing-difference threshold (degrees)
    :param offset_angle: constant offset (degrees) added to interpolated bearings
    :param verbose: print extra progress information
    :param rerun: reprocess images that already completed sequence_process
    :param skip_subfolders: only process images directly under import_path
    """
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " doesnt not exist, exiting...")
        sys.exit()

    sequences = []
    if skip_subfolders:
        process_file_list = processing.get_process_file_list(import_path,
                                                             "sequence_process",
                                                             rerun,
                                                             verbose,
                                                             True,
                                                             import_path)
        if not len(process_file_list):
            if verbose:
                print("No images to run sequence process in root " + import_path)
                print(
                    "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")
        else:
            # LOAD TIME AND GPS POINTS ------------------------------------
            # NOTE(review): load_geotag_points is defined elsewhere in this
            # file as (process_file_list, verbose) — the extra import_path
            # argument here looks like a version mismatch; confirm against
            # the processing module actually in use.
            file_list, capture_times, lats, lons, directions = processing.load_geotag_points(
                process_file_list, import_path, verbose)
            # ---------------------------------------

            # SPLIT SEQUENCES --------------------------------------
            if len(capture_times) and len(lats) and len(lons):
                sequences.extend(processing.split_sequences(
                    capture_times, lats, lons, file_list, directions, cutoff_time, cutoff_distance, verbose))
        # ---------------------------------------
    else:
        # sequence limited to the root of the files
        for root, dirs, files in os.walk(import_path):
            if ".mapillary" in root:
                continue
            if len(files):
                process_file_list = processing.get_process_file_list(import_path,
                                                                     "sequence_process",
                                                                     rerun,
                                                                     verbose,
                                                                     True,
                                                                     root)
                if not len(process_file_list):
                    if verbose:
                        print("No images to run sequence process in root " + root)
                        print(
                            "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")
                    continue

                # LOAD TIME AND GPS POINTS ------------------------------------
                file_list, capture_times, lats, lons, directions = processing.load_geotag_points(
                    process_file_list, import_path, verbose)
                # ---------------------------------------

                # SPLIT SEQUENCES --------------------------------------
                if len(capture_times) and len(lats) and len(lons):
                    sequences.extend(processing.split_sequences(
                        capture_times, lats, lons, file_list, directions, cutoff_time, cutoff_distance, verbose))
                # ---------------------------------------

    # process for each sequence
    for sequence in sequences:
        file_list = sequence["file_list"]
        directions = sequence["directions"]
        latlons = sequence["latlons"]
        capture_times = sequence["capture_times"]

        # COMPUTE DIRECTIONS --------------------------------------
        # bearing between each pair of consecutive GPS points; the last
        # image has no successor, so it reuses the final stored direction
        interpolated_directions = [compute_bearing(ll1[0], ll1[1], ll2[0], ll2[1])
                                   for ll1, ll2 in zip(latlons, latlons[1:])]
        interpolated_directions.append(directions[-1])
        # use interpolated directions if direction not available or if flag for
        # interpolate_directions
        for i, d in enumerate(directions):
            directions[i] = d if (
                d is not None and not interpolate_directions) else (interpolated_directions[i] + offset_angle) % 360.0
        # ---------------------------------------

        # INTERPOLATE TIMESTAMPS, incase of identical timestamps
        capture_times, file_list = processing.interpolate_timestamp(capture_times,
                                                                    file_list)

        # copies: the duplicate-flagging step below may shrink these
        final_file_list = file_list[:]
        final_directions = directions[:]
        final_capture_times = capture_times[:]

        # FLAG DUPLICATES --------------------------------------
        if flag_duplicates:
            # keep the first image, then drop every image that is within
            # both thresholds of the last *kept* image
            final_file_list = [file_list[0]]
            final_directions = [directions[0]]
            prev_latlon = latlons[0]
            prev_direction = directions[0]
            for i, filename in enumerate(file_list[1:]):
                # NOTE(review): log_rootpath is called with a single
                # argument everywhere else in this file — confirm this
                # two-argument form matches the uploader version in use.
                log_root = uploader.log_rootpath(import_path,
                                                 filename)
                duplicate_flag_path = os.path.join(log_root,
                                                   "duplicate")
                sequence_process_success_path = os.path.join(log_root,
                                                             "sequence_process_success")
                # i enumerates file_list[1:], so k indexes the full lists
                k = i + 1
                distance = gps_distance(latlons[k],
                                        prev_latlon)
                if directions[k] is not None and prev_direction is not None:
                    direction_diff = diff_bearing(directions[k],
                                                  prev_direction)
                else:
                    # dont use bearing difference if no bearings are
                    # available
                    direction_diff = 360
                if distance < duplicate_distance and direction_diff < duplicate_angle:
                    # duplicate: flag it and mark sequence processing done
                    open(duplicate_flag_path, "w").close()
                    open(sequence_process_success_path, "w").close()
                    open(sequence_process_success_path + "_" +
                         str(time.strftime("%Y_%m_%d_%H_%M_%S", time.gmtime())), "w").close()
                else:
                    prev_latlon = latlons[k]
                    prev_direction = directions[k]
                    final_file_list.append(filename)
                    final_directions.append(directions[k])
        # ---------------------------------------

        # FINALIZE ------------------------------------
        # split long sequences into chunks of at most MAX_SEQUENCE_LENGTH,
        # each finalized under a fresh sequence UUID
        for i in range(0, len(final_file_list), MAX_SEQUENCE_LENGTH):
            finalize_sequence_processing(str(uuid.uuid4()),
                                         final_file_list[i:i +
                                                         MAX_SEQUENCE_LENGTH],
                                         final_directions[i:i +
                                                          MAX_SEQUENCE_LENGTH],
                                         final_capture_times[i:i +
                                                             MAX_SEQUENCE_LENGTH],
                                         import_path,
                                         verbose)
# Beispiel #26
# 0
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False,
                   keep_original=False,
                   video_file=None):
    """Compose and log the final Mapillary image description per image.

    For every image that still needs the "mapillary_image_description"
    step (and is not flagged as a duplicate), assembles the final image
    description from the processing logs and records a success entry.

    :param import_path: root directory of the images to process
    :param master_upload: process for a master (organization) upload
    :param verbose: print progress dots and extra information
    :param rerun: reprocess images that already completed this step
    :param skip_subfolders: only process images directly under import_path
    :param skip_EXIF_insert: do not write the description back into EXIF
    :param keep_original: keep the original image file untouched
    :param video_file: optional video whose sampled frames are processed;
        adjusts import_path to the sampled-frames directory
    :raises SystemExit: if the video path or import directory is invalid
    """

    # sanity check if video file is passed
    if video_file and not (os.path.isdir(video_file)
                           or os.path.isfile(video_file)):
        print("Error, video path " + video_file +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_file:
        # set sampling path
        video_sampling_path = processing.sampled_video_frames_rootpath(
            video_file)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.dirname(video_file), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "mapillary_image_description", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run process finalization")
        print(
            "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun"
        )

    # idiom: enumerate(..., 1) replaces the manually maintained counter
    for progress_count, image in enumerate(process_file_list, 1):
        if verbose:
            # lightweight progress: a dot every 50 images, newline every 5000
            if (progress_count % 50) == 0:
                sys.stdout.write(".")
            if (progress_count % 5000) == 0:
                print("")
        # check the processing logs; duplicates get no description
        log_root = uploader.log_rootpath(image)

        duplicate_path = os.path.join(log_root, "duplicate")

        if os.path.isfile(duplicate_path):
            continue

        final_mapillary_image_description = processing.get_final_mapillary_image_description(
            log_root, image, master_upload, verbose, skip_EXIF_insert,
            keep_original)

        processing.create_and_log_process(image,
                                          "mapillary_image_description",
                                          "success",
                                          final_mapillary_image_description,
                                          verbose=verbose)

    print("Sub process finished")
# Beispiel #27
# 0
def is_duplicate(file_path):
    """Return True if the image at file_path is flagged as a duplicate."""
    # the sequence process drops an empty "duplicate" marker file in the
    # image's log directory when the frame is considered a duplicate
    return os.path.isfile(
        os.path.join(uploader.log_rootpath(file_path), "duplicate"))
# Beispiel #28
# 0
def post_process(import_path,
                 split_import_path=None,
                 video_import_path=None,
                 summarize=False,
                 move_all_images=False,
                 move_duplicates=False,
                 move_uploaded=False,
                 move_sequences=False,
                 save_as_json=False,
                 list_file_status=False,
                 push_images=False,
                 skip_subfolders=False,
                 verbose=False,
                 save_local_mapping=False):
    """Run the requested post-processing actions over an import directory.

    Depending on the boolean flags this summarizes upload/process status,
    lists per-file status, pushes (finalizes) manually uploaded images,
    saves a local mapping CSV, and/or physically moves images (duplicates,
    uploaded, per-sequence) under split_import_path along with their
    .mapillary log directories.

    :param import_path: root directory of the processed images
    :param split_import_path: destination root for moved images
        (defaults to import_path)
    :param video_import_path: original video path; redirects import_path
        to the sampled-frames directory
    :param save_as_json: also write summary / status lists to JSON files
    :param skip_subfolders: only consider images directly in import_path
    """

    # return if nothing specified
    if not any([
            summarize, move_all_images, list_file_status, push_images,
            move_duplicates, move_uploaded, save_local_mapping, move_sequences
    ]):
        print("No post processing action specified.")
        return

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(
            video_import_path) and not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)
    # --move_all_images implies all three individual move flags
    if move_all_images:
        move_sequences = True
        move_duplicates = True
        move_uploaded = True
    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)
    if save_local_mapping:
        # NOTE(review): the boolean parameter `save_local_mapping` shadows
        # the module-level function of the same name, so this call invokes
        # the bool and would raise TypeError; `local_mapping_filepath` is
        # also never assigned in this scope. This branch looks broken as
        # written — confirm intent against upstream before relying on it.
        local_mapping = save_local_mapping(import_path)
        with open(local_mapping_filepath, "w") as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            for row in local_mapping:
                csvwriter.writerow(row)
    if push_images:
        # finalize images that were manually uploaded but not yet pushed
        to_be_pushed_files = uploader.get_success_only_manual_upload_file_list(
            import_path, skip_subfolders)
        params = {}
        for image in tqdm(to_be_pushed_files, desc="Pushing images"):
            log_root = uploader.log_rootpath(image)
            upload_params_path = os.path.join(log_root,
                                              "upload_params_process.json")
            if os.path.isfile(upload_params_path):
                with open(upload_params_path, "rb") as jf:
                    params[image] = json.load(
                        jf, object_hook=uploader.ascii_encode_dict)

        # get the s3 locations of the sequences
        finalize_params = uploader.process_upload_finalization(
            to_be_pushed_files, params)
        uploader.finalize_upload(finalize_params)
        # flag finalization for each file
        uploader.flag_finalization(to_be_pushed_files)

    # gather only the file lists required by the requested actions
    if any([summarize, list_file_status, move_uploaded]):
        # upload logs
        uploaded_files = uploader.get_success_upload_file_list(
            import_path, skip_subfolders)
        uploaded_files_count = len(uploaded_files)
        failed_upload_files = uploader.get_failed_upload_file_list(
            import_path, skip_subfolders)
        failed_upload_files_count = len(failed_upload_files)
        to_be_finalized_files = uploader.get_finalize_file_list(import_path)
        to_be_finalized_files_count = len(to_be_finalized_files)
        to_be_uploaded_files = uploader.get_upload_file_list(
            import_path, skip_subfolders)
        to_be_uploaded_files_count = len(to_be_uploaded_files)
    if any([summarize, move_sequences]):
        total_files = uploader.get_total_file_list(import_path)
        total_files_count = len(total_files)
    if any([summarize, move_duplicates, list_file_status]):
        duplicates_file_list = processing.get_duplicate_file_list(
            import_path, skip_subfolders)
        duplicates_file_list_count = len(duplicates_file_list)
    if summarize:
        # build a nested summary of upload and per-step process counts
        summary_dict = {}
        summary_dict["total images"] = total_files_count
        summary_dict["upload summary"] = {
            "successfully uploaded": uploaded_files_count,
            "failed uploads": failed_upload_files_count,
            "uploaded to be finalized": to_be_finalized_files_count
        }
        # process logs
        summary_dict["process summary"] = {}
        process_steps = [
            "user_process", "import_meta_process", "geotag_process",
            "sequence_process", "upload_params_process",
            "mapillary_image_description"
        ]
        process_status = ["success", "failed"]
        for step in process_steps:

            process_success = len(
                processing.get_process_status_file_list(
                    import_path, step, "success", skip_subfolders))
            process_failed = len(
                processing.get_process_status_file_list(
                    import_path, step, "failed", skip_subfolders))
            summary_dict["process summary"][step] = {
                "failed": process_failed,
                "success": process_success
            }
        summary_dict["process summary"][
            "duplicates"] = duplicates_file_list_count
        summary_dict["process summary"][
            "processed_not_yet_uploaded"] = to_be_uploaded_files_count
        print("Import summary for import path {} :".format(import_path))
        print(json.dumps(summary_dict, indent=4))

        # publish the summary over ipc (presumably to a listening parent
        # process/GUI — confirm against the ipc module)
        ipc.send('summary', summary_dict)

        if save_as_json:
            try:
                processing.save_json(
                    summary_dict,
                    os.path.join(import_path, "mapillary_import_summary.json"))
            except Exception as e:
                print(
                    "Could not save summary into json at {}, due to {}".format(
                        os.path.join(import_path,
                                     "mapillary_import_summary.json"), e))
    if list_file_status:
        # per-file status breakdown, mirroring the summary categories
        status_list_dict = {}
        status_list_dict["successfully uploaded"] = uploaded_files
        status_list_dict["failed uploads"] = failed_upload_files
        status_list_dict["uploaded to be finalized"] = to_be_finalized_files
        status_list_dict["duplicates"] = duplicates_file_list
        status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files
        print("")
        print("List of file status for import path {} :".format(import_path))
        print(json.dumps(status_list_dict, indent=4))
        if save_as_json:
            try:
                processing.save_json(
                    status_list_dict,
                    os.path.join(import_path,
                                 "mapillary_import_image_status_list.json"))
            except Exception as e:
                print(
                    "Could not save image status list into json at {}, due to {}"
                    .format(
                        os.path.join(
                            import_path,
                            "mapillary_import_image_status_list.json"), e))
    # images are moved under split_import_path, defaulting to import_path
    split_import_path = split_import_path if split_import_path else import_path
    if any([move_sequences, move_duplicates, move_uploaded]):
        if not os.path.isdir(split_import_path):
            print("Split import path {} does not exist.".format(
                split_import_path))
            sys.exit(1)

    # map each image to the destination subdirectories it belongs in
    destination_mapping = {}
    if move_duplicates:
        for image in duplicates_file_list:
            destination_mapping[image] = {"basic": ["duplicates"]}
    if move_uploaded:
        for image in uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("uploaded")
            else:
                destination_mapping[image] = {"basic": ["uploaded"]}
        for image in failed_upload_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("failed_upload")
            else:
                destination_mapping[image] = {"basic": ["failed_upload"]}
        for image in to_be_finalized_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append(
                    "uploaded_not_finalized")
            else:
                destination_mapping[image] = {
                    "basic": ["uploaded_not_finalized"]
                }
        for image in to_be_uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("to_be_uploaded")
            else:
                destination_mapping[image] = {"basic": ["to_be_uploaded"]}
    if move_sequences:
        destination_mapping = map_images_to_sequences(destination_mapping,
                                                      total_files)
    # move each image, preserving its path relative to import_path, and
    # move its .mapillary log directory alongside it
    for image in destination_mapping:
        basic_destination = destination_mapping[image][
            "basic"] if "basic" in destination_mapping[image] else []
        sequence_destination = destination_mapping[image][
            "sequence"] if "sequence" in destination_mapping[image] else ""
        image_destination_path = os.path.join(*(
            [split_import_path] + basic_destination +
            [os.path.dirname(image[len(os.path.abspath(import_path)) + 1:])] +
            [sequence_destination,
             os.path.basename(image)]))
        if not os.path.isdir(os.path.dirname(image_destination_path)):
            os.makedirs(os.path.dirname(image_destination_path))
        os.rename(image, image_destination_path)
        image_logs_dir = uploader.log_rootpath(image)
        destination_logs_dir = uploader.log_rootpath(image_destination_path)
        if not os.path.isdir(image_logs_dir):
            continue
        if not os.path.isdir(os.path.dirname(destination_logs_dir)):
            os.makedirs(os.path.dirname(destination_logs_dir))
        os.rename(image_logs_dir, destination_logs_dir)
# Beispiel #29
# 0
def process_upload_params(import_path,
                          user_name,
                          master_upload=False,
                          verbose=False,
                          rerun=False,
                          skip_subfolders=False,
                          video_import_path=None):
    """Prepare and log per-image upload parameters.

    Authenticates user_name (unless master_upload) and, for every image
    that still needs the "upload_params_process" step and is not flagged
    as a duplicate, computes the upload parameters, logs a success entry,
    and flags the image for manual upload.

    :param import_path: root directory of the images to process
    :param user_name: Mapillary user to authenticate as
    :param master_upload: skip per-user upload params (master upload)
    :param verbose: print extra progress information
    :param rerun: reprocess images that already completed this step
    :param skip_subfolders: only process images directly under import_path
    :param video_import_path: original video path; redirects import_path
        to the sampled-frames directory
    :raises SystemExit: on invalid paths, missing user name, or failed
        authentication
    """

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(
            video_import_path) and not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "upload_params_process", rerun, verbose, skip_subfolders)
    if not len(process_file_list):
        print("No images to run upload params process")
        print(
            "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun"
        )

    # sanity checks
    if not user_name:
        print_error("Error, must provide a valid user name, exiting...")
        # bug fix: a missing comma here made "upload_params_process" and
        # "failed" concatenate into one wrong status argument and shifted
        # verbose into the status position
        processing.create_and_log_process_in_list(
            process_file_list, "upload_params_process",
            "failed", verbose)
        sys.exit(1)

    if not master_upload:
        try:
            credentials = uploader.authenticate_user(user_name)
        # narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are no longer swallowed
        except Exception:
            print_error("Error, user authentication failed for user " +
                        user_name)
            processing.create_and_log_process_in_list(
                process_file_list, "upload_params_process",
                "failed", verbose)
            sys.exit(1)
        # identity comparison with None per PEP 8 (was `== None`)
        if credentials is None or "user_upload_token" not in credentials or "user_permission_hash" not in credentials or "user_signature_hash" not in credentials:
            print_error("Error, user authentication failed for user " +
                        user_name)
            processing.create_and_log_process_in_list(
                process_file_list, "upload_params_process",
                "failed", verbose)
            sys.exit(1)

        user_upload_token = credentials["user_upload_token"]
        user_permission_hash = credentials["user_permission_hash"]
        user_signature_hash = credentials["user_signature_hash"]
        user_key = credentials["MAPSettingsUserKey"]

    for image in tqdm(process_file_list,
                      desc="Processing image upload parameters"):

        # check the status of the sequence processing
        log_root = uploader.log_rootpath(image)
        duplicate_flag_path = os.path.join(log_root, "duplicate")
        upload_params_path = os.path.join(log_root,
                                          "upload_params_process.json")

        # drop any stale upload params left over from a previous run
        if os.path.isfile(upload_params_path):
            os.remove(upload_params_path)

        # duplicates and master uploads get no upload params
        if os.path.isfile(duplicate_flag_path) or master_upload:
            continue

        upload_params_properties = processing.get_upload_param_properties(
            log_root, image, user_name, user_upload_token,
            user_permission_hash, user_signature_hash, user_key, verbose)
        processing.create_and_log_process(image,
                                          "upload_params_process",
                                          "success",
                                          upload_params_properties,
                                          verbose=verbose)
        # flag manual upload
        log_manual_upload = os.path.join(log_root, "manual_upload")
        open(log_manual_upload, 'a').close()

    print("Sub process ended")
# Beispiel #30
# 0
def process_status(file_path, process, status):
    """Return True if the "<process>_<status>" flag file exists for file_path."""
    # process steps record their outcome as empty marker files named
    # e.g. "sequence_process_success" in the image's log directory
    flag_path = os.path.join(uploader.log_rootpath(file_path),
                             process + "_" + status)
    return os.path.isfile(flag_path)
# Beispiel #31
# 0
def upload(import_path,
           manual_done=False,
           verbose=False,
           skip_subfolders=False,
           video_file=None,
           number_threads=None,
           max_attempts=None):
    '''
    Upload local images to Mapillary

    Args:
        import_path: Directory path to where the images are stored.
        manual_done: Prompt user to confirm upload finalization.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        video_file: Optional video whose sampled frames are uploaded.
        number_threads: Number of upload worker threads.
        max_attempts: Max upload retries per image.

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.
    '''
    # a video path, if given, must point at an existing file or directory
    if video_file and not (os.path.isdir(video_file)
                           or os.path.isfile(video_file)):
        print("Error, video path " + video_file +
              " does not exist, exiting...")
        sys.exit(1)

    # for video uploads, redirect the import path to the directory that
    # holds the sampled frames
    if video_file:
        video_sampling_path = processing.sampled_video_frames_rootpath(
            video_file)
        if import_path:
            import_path = os.path.join(
                os.path.abspath(import_path), video_sampling_path)
        else:
            import_path = os.path.join(
                os.path.dirname(video_file), video_sampling_path)

    # the import directory itself must exist
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # collect the per-image upload state from the logs
    all_images = uploader.get_total_file_list(import_path,
                                              skip_subfolders)
    pending_images = uploader.get_upload_file_list(import_path,
                                                   skip_subfolders)
    failed_images = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    succeeded_images = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)

    if len(succeeded_images) == len(all_images):
        print("All images have already been uploaded")
        sys.exit()

    # optionally retry images whose previous upload attempt failed
    if len(failed_images):
        retry_answer = raw_input(
            "Retry uploading previously failed image uploads? [y/n]: ")
        if retry_answer in ["y", "Y", "yes", "Yes"]:
            pending_images.extend(failed_images)

    # only images carrying the required Mapillary metadata are uploadable
    pending_images = [f for f in pending_images if verify_mapillary_tag(f)]

    if not len(pending_images):
        print("No images to upload.")
        print(
            'Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them'
        )
        sys.exit(1)

    # read the upload parameters recorded during processing
    params = {}
    for image in all_images:
        upload_params_path = os.path.join(uploader.log_rootpath(image),
                                          "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            with open(upload_params_path, "rb") as jf:
                params[image] = json.load(
                    jf, object_hook=uploader.ascii_encode_dict)

    # report how many images will be uploaded vs skipped
    print("Uploading {} images with valid mapillary tags (Skipping {})".format(
        len(pending_images),
        len(all_images) - len(pending_images)))

    # hand the verified list and its upload parameters to the uploader
    uploader.upload_file_list(pending_images, params, number_threads,
                              max_attempts)

    # manual uploads require a DONE marker so the harvester knows the
    # sequence has finished uploading
    to_finalize = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    if len(to_finalize):
        finalize_all = 1
        if manual_done:
            finalize_all = uploader.prompt_to_finalize("uploads")
        if finalize_all:
            # get the s3 locations of the sequences
            finalize_params = uploader.process_upload_finalization(
                to_finalize, params)
            uploader.finalize_upload(finalize_params)
            # flag finalization for each file
            uploader.flag_finalization(to_finalize)
        else:
            print("Uploads will not be finalized.")
            print(
                "If you wish to finalize your uploads, run the upload tool again."
            )
            sys.exit()

    uploader.print_summary(pending_images)
def process_sequence_properties(import_path,
                                cutoff_distance=600.0,
                                cutoff_time=60.0,
                                interpolate_directions=False,
                                keep_duplicates=False,
                                duplicate_distance=0.1,
                                duplicate_angle=5,
                                offset_angle=0.0,
                                verbose=False,
                                rerun=False,
                                skip_subfolders=False,
                                video_import_path=None):
    """Split geotagged images into capture sequences and log the results.

    Images are gathered per folder (or only from the import root when
    ``skip_subfolders`` is set) and split into sequences wherever
    consecutive capture times differ by more than ``cutoff_time`` seconds
    or GPS positions by more than ``cutoff_distance`` meters.

    Per sequence: bearings are interpolated from the GPS track whenever an
    image direction is missing or ``interpolate_directions`` is set
    (``offset_angle`` is added to interpolated bearings); identical
    timestamps are interpolated apart; and -- unless ``keep_duplicates`` --
    consecutive images closer than ``duplicate_distance`` meters with a
    bearing difference below ``duplicate_angle`` degrees are flagged as
    duplicates and excluded.  Each chunk of at most MAX_SEQUENCE_LENGTH
    images is finalized under a fresh sequence UUID.

    Exits the process with status 1 when the video or import path does not
    exist.
    """

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    sequences = []
    if skip_subfolders:
        # only images directly in the import root take part
        process_file_list = processing.get_process_file_list(import_path,
                                                             "sequence_process",
                                                             rerun,
                                                             verbose,
                                                             True,
                                                             import_path)
        if not len(process_file_list):
            if verbose:
                print("No images to run sequence process in root " + import_path)
                print(
                    "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")
        else:
            # LOAD TIME AND GPS POINTS ------------------------------------
            file_list, capture_times, lats, lons, directions = processing.load_geotag_points(
                process_file_list, verbose)
            # ---------------------------------------

            # SPLIT SEQUENCES --------------------------------------
            if len(capture_times) and len(lats) and len(lons):
                sequences.extend(processing.split_sequences(
                    capture_times, lats, lons, file_list, directions, cutoff_time, cutoff_distance, verbose))
        # ---------------------------------------
    else:
        # sequence limited to the root of the files
        for root, dirs, files in os.walk(import_path):
            # skip mapillary's internal log directories
            if os.path.join(".mapillary", "logs") in root:
                continue
            if len(files):
                process_file_list = processing.get_process_file_list(import_path,
                                                                     "sequence_process",
                                                                     rerun,
                                                                     verbose,
                                                                     True,
                                                                     root)
                if not len(process_file_list):
                    if verbose:
                        print("No images to run sequence process in root " + root)
                        print(
                            "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")
                    continue
                # LOAD TIME AND GPS POINTS ------------------------------------
                file_list, capture_times, lats, lons, directions = processing.load_geotag_points(
                    process_file_list, verbose)
                # ---------------------------------------
                # SPLIT SEQUENCES --------------------------------------
                if len(capture_times) and len(lats) and len(lons):
                    sequences.extend(processing.split_sequences(
                        capture_times, lats, lons, file_list, directions, cutoff_time, cutoff_distance, verbose))
                # ---------------------------------------
    if not keep_duplicates:
        if verbose:
            print("Flagging images as duplicates if consecutive distance difference less than {} and angle difference less than {}".format(
                duplicate_distance, duplicate_angle))

    # process for each sequence
    for sequence in sequences:
        file_list = sequence["file_list"]
        directions = sequence["directions"]
        latlons = sequence["latlons"]
        capture_times = sequence["capture_times"]

        # COMPUTE DIRECTIONS --------------------------------------
        # bearing from each point to its successor; the last image reuses
        # the previous bearing (a single-image sequence falls back to the
        # image's own direction)
        interpolated_directions = [compute_bearing(ll1[0], ll1[1], ll2[0], ll2[1])
                                   for ll1, ll2 in zip(latlons[:-1], latlons[1:])]
        if len(interpolated_directions):
            interpolated_directions.append(interpolated_directions[-1])
        else:
            # NOTE(review): this may append None when the single image had
            # no compass heading, which would make the modulo below raise
            # TypeError -- TODO confirm upstream always supplies a number
            interpolated_directions.append(directions[-1])
        # use interpolated directions if direction not available or if flag for
        # interpolate_directions
        for i, d in enumerate(directions):
            directions[i] = d if (
                d is not None and not interpolate_directions) else (interpolated_directions[i] + offset_angle) % 360.0
        # ---------------------------------------

        # COMPUTE SPEED -------------------------------------------
        computed_delta_ts = [(t1 - t0).total_seconds()
                             for t0, t1 in zip(capture_times[:-1], capture_times[1:])]
        computed_distances = [gps_distance(l1, l0)
                              for l0, l1 in zip(latlons[:-1], latlons[1:])]
        computed_speed = gps_speed(
            computed_distances, computed_delta_ts)  # in meters/second
        # implausible speed only triggers a warning; processing continues
        if len([x for x in computed_speed if x > MAX_CAPTURE_SPEED]) > 0:
            print("Warning: The distance in sequence including images\n{}\nto\n{}\nis too large for the time difference (very high apparent capture speed). Are you sure timestamps and locations are correct?".format(
                file_list[0], file_list[-1]))

        # INTERPOLATE TIMESTAMPS, in case of identical timestamps
        capture_times = processing.interpolate_timestamp(capture_times)

        final_file_list = file_list[:]
        final_directions = directions[:]
        final_capture_times = capture_times[:]
        # FLAG DUPLICATES --------------------------------------
        if not keep_duplicates:
            # the first image is always kept; each following image is
            # compared against the last *kept* image, not its immediate
            # predecessor
            final_file_list = [file_list[0]]
            final_directions = [directions[0]]
            final_capture_times = [capture_times[0]]
            prev_latlon = latlons[0]
            prev_direction = directions[0]
            for i, filename in enumerate(file_list[1:]):
                log_root = uploader.log_rootpath(filename)
                duplicate_flag_path = os.path.join(log_root,
                                                   "duplicate")
                sequence_process_success_path = os.path.join(log_root,
                                                             "sequence_process_success")
                # i enumerates file_list[1:], so k is the absolute index
                k = i + 1
                distance = gps_distance(latlons[k],
                                        prev_latlon)
                if directions[k] is not None and prev_direction is not None:
                    direction_diff = diff_bearing(directions[k],
                                                  prev_direction)
                else:
                    # dont use bearing difference if no bearings are
                    # available
                    direction_diff = 360
                if distance < duplicate_distance and direction_diff < duplicate_angle:
                    # duplicate: flag it and mark sequence_process done so
                    # later steps skip this image
                    open(duplicate_flag_path, "w").close()
                    open(sequence_process_success_path, "w").close()
                    open(sequence_process_success_path + "_" +
                         str(time.strftime("%Y_%m_%d_%H_%M_%S", time.gmtime())), "w").close()
                else:
                    prev_latlon = latlons[k]
                    prev_direction = directions[k]
                    final_file_list.append(filename)
                    final_directions.append(directions[k])
                    final_capture_times.append(capture_times[k])
        # ---------------------------------------

        # FINALIZE ------------------------------------
        # chunk into sequences of at most MAX_SEQUENCE_LENGTH images, each
        # chunk under its own uuid
        for i in range(0, len(final_file_list), MAX_SEQUENCE_LENGTH):
            finalize_sequence_processing(str(uuid.uuid4()),
                                         final_file_list[i:i +
                                                         MAX_SEQUENCE_LENGTH],
                                         final_directions[i:i +
                                                          MAX_SEQUENCE_LENGTH],
                                         final_capture_times[i:i +
                                                             MAX_SEQUENCE_LENGTH],
                                         import_path,
                                         verbose)
    print("Sub process ended")
def failed_process(file_path, process):
    """Return True when the "<process>_failed" flag file exists for file_path."""
    failed_flag = os.path.join(
        uploader.log_rootpath(file_path), process + "_failed")
    return os.path.isfile(failed_flag)
# Beispiel #34
# 0
def post_process(import_path,
                 split_import_path=None,
                 video_import_path=None,
                 summarize=False,
                 move_images=False,
                 move_duplicates=False,
                 move_uploaded=False,
                 save_as_json=False,
                 list_file_status=False,
                 push_images=False,
                 skip_subfolders=False,
                 verbose=False,
                 save_local_mapping=False):
    """Run post-processing actions over an import path based on its logs.

    Depending on the flags this writes a local uuid-to-path mapping csv,
    prints/saves an import summary and per-file status lists, pushes
    (finalizes) manually uploaded images, or moves images -- together with
    their mapillary logs, to preserve import status -- into subdirectories
    of a split path according to their import status.

    Returns None; exits with status 1 on an invalid video or import path.
    """

    # return if nothing specified
    if not summarize and not move_images and not list_file_status and not push_images and not move_duplicates and not move_uploaded and not save_local_mapping:
        print("No post processing action specified.")
        return

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # sampled frames live in a fixed subdirectory
        video_sampling_path = "mapillary_sampled_video_frames"
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_import_path), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)
    if save_local_mapping:
        # BUG FIX: the boolean parameter `save_local_mapping` shadows the
        # module-level save_local_mapping() helper, so the original code
        # called the bool (TypeError) and referenced an undefined
        # `local_mapping_filepath`.  Call the helper through the processing
        # module and rebuild the csv path the same way the helper does.
        # NOTE(review): confirm save_local_mapping() lives in `processing`.
        local_mapping = processing.save_local_mapping(import_path)
        local_mapping_filepath = os.path.join(os.path.dirname(
            import_path), import_path + "_mapillary_image_uuid_to_local_path_mapping.csv")
        with open(local_mapping_filepath, "w") as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            for row in local_mapping:
                csvwriter.writerow(row)
    else:
        print("Reading import logs for import path {}...".format(import_path))

        # collect logs
        summary_dict = {}
        status_list_dict = {}

        total_files = uploader.get_total_file_list(import_path)
        total_files_count = len(total_files)

        # upload logs
        uploaded_files = uploader.get_success_upload_file_list(
            import_path, skip_subfolders)
        uploaded_files_count = len(uploaded_files)

        failed_upload_files = uploader.get_failed_upload_file_list(
            import_path, skip_subfolders)
        failed_upload_files_count = len(failed_upload_files)

        to_be_finalized_files = uploader.get_finalize_file_list(import_path)
        to_be_finalized_files_count = len(to_be_finalized_files)

        summary_dict["total images"] = total_files_count
        summary_dict["upload summary"] = {
            "successfully uploaded": uploaded_files_count,
            "failed uploads": failed_upload_files_count,
            "uploaded to be finalized": to_be_finalized_files_count
        }

        status_list_dict["successfully uploaded"] = uploaded_files
        status_list_dict["failed uploads"] = failed_upload_files
        status_list_dict["uploaded to be finalized"] = to_be_finalized_files

        # process logs: success/failure counts per processing step
        summary_dict["process summary"] = {}
        process_steps = ["user_process", "import_meta_process", "geotag_process",
                         "sequence_process", "upload_params_process", "mapillary_image_description"]
        for step in process_steps:
            process_success = len(processing.get_process_status_file_list(
                import_path, step, "success", skip_subfolders))
            process_failed = len(processing.get_process_status_file_list(
                import_path, step, "failed", skip_subfolders))

            summary_dict["process summary"][step] = {
                "failed": process_failed,
                "success": process_success
            }

        duplicates_file_list = processing.get_duplicate_file_list(
            import_path, skip_subfolders)
        duplicates_file_list_count = len(duplicates_file_list)

        summary_dict["process summary"]["duplicates"] = duplicates_file_list_count
        status_list_dict["duplicates"] = duplicates_file_list

        # processed for upload
        to_be_uploaded_files = uploader.get_upload_file_list(
            import_path, skip_subfolders)
        to_be_uploaded_files_count = len(to_be_uploaded_files)
        summary_dict["process summary"]["processed_not_yet_uploaded"] = to_be_uploaded_files_count
        status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files

        # summary
        if summarize:
            print("")
            print("Import summary for import path {} :".format(import_path))
            print(json.dumps(summary_dict, indent=4))

            ipc.send('summary', summary_dict)

            if save_as_json:
                try:
                    processing.save_json(summary_dict, os.path.join(
                        import_path, "mapillary_import_summary.json"))
                except Exception as e:
                    # saving the summary is best effort; report and move on
                    print("Could not save summary into json at {}, due to {}".format(
                        os.path.join(import_path, "mapillary_import_summary.json"), e))

        # list file status
        if list_file_status:
            print("")
            print("List of file status for import path {} :".format(import_path))
            print(json.dumps(status_list_dict, indent=4))

            if save_as_json:
                try:
                    processing.save_json(status_list_dict, os.path.join(
                        import_path, "mapillary_import_image_status_list.json"))
                except Exception as e:
                    print("Could not save image status list into json at {}, due to {}".format(
                        os.path.join(import_path, "mapillary_import_image_status_list.json"), e))

        # push images that were uploaded successfully
        # collect upload params
        if push_images:
            to_be_pushed_files = uploader.get_success_only_manual_upload_file_list(
                import_path, skip_subfolders)
            params = {}
            for image in tqdm(to_be_pushed_files, desc="Pushing images"):
                log_root = uploader.log_rootpath(image)
                upload_params_path = os.path.join(
                    log_root, "upload_params_process.json")
                if os.path.isfile(upload_params_path):
                    with open(upload_params_path, "rb") as jf:
                        params[image] = json.load(
                            jf, object_hook=uploader.ascii_encode_dict)

            # get the s3 locations of the sequences
            finalize_params = uploader.process_upload_finalization(
                to_be_pushed_files, params)
            uploader.finalize_upload(finalize_params)
            # flag finalization for each file
            uploader.flag_finalization(to_be_pushed_files)

        if move_images or move_duplicates or move_uploaded:
            print("")
            print("Note that images will be moved along with their mapillary logs in order to preserve the import status")
            default_split_import_path = os.path.join(
                import_path, "mapillary_import_split_images")
            if not split_import_path:
                final_split_path = default_split_import_path
                print("")
                print(
                    "Split import path not provided and will therefore be set to default path {}".format(default_split_import_path))
            else:
                # BUG FIX: the original tested os.path.isfile() on the split
                # path; isfile() is False for an existing directory, so a
                # valid user-supplied directory was always rejected.
                if not os.path.isdir(split_import_path):
                    final_split_path = default_split_import_path
                    print("Split import path does not exist, split import path will be set to default path {}".format(
                        default_split_import_path))
                else:
                    final_split_path = split_import_path
            print("")
            print("Splitting import path {} into {} based on image import status...".format(
                import_path, final_split_path))
            if move_images:
                # moving everything implies both sub-actions below
                move_duplicates = True
                move_uploaded = True
                # move failed uploads
                if not len(failed_upload_files):
                    print("")
                    print(
                        "There are no failed upload images in the specified import path.")
                else:
                    failed_upload_path = os.path.join(
                        final_split_path, "upload_failed")

                    if not os.path.isdir(failed_upload_path):
                        os.makedirs(failed_upload_path)

                    for failed in failed_upload_files:
                        failed_upload_image_path = os.path.join(
                            failed_upload_path, os.path.basename(failed))
                        # BUG FIX: originally renamed onto the directory path
                        # (failed_upload_path) instead of the per-image
                        # destination, matching the (correct) duplicates
                        # branch below.
                        os.rename(failed, failed_upload_image_path)
                        failed_upload_log_path = os.path.dirname(uploader.log_rootpath(
                            failed_upload_image_path))
                        if not os.path.isdir(failed_upload_log_path):
                            os.makedirs(failed_upload_log_path)
                        shutil.move(uploader.log_rootpath(failed),
                                    failed_upload_log_path)
                    print("")
                    print("Done moving failed upload images to {}".format(
                        failed_upload_path))
            if move_duplicates:
                if not len(duplicates_file_list):
                    print("")
                    print("There were no duplicates flagged in the specified import path. If you are processing the images with mapillary_tools and would like to flag duplicates, you must specify --advanced --flag_duplicates")
                else:
                    duplicate_path = os.path.join(
                        final_split_path, "duplicates")
                    if not os.path.isdir(duplicate_path):
                        os.makedirs(duplicate_path)
                    for duplicate in duplicates_file_list:
                        duplicate_image_path = os.path.join(
                            duplicate_path, os.path.basename(duplicate))
                        os.rename(duplicate, duplicate_image_path)
                        duplicate_log_path = os.path.dirname(uploader.log_rootpath(
                            duplicate_image_path))
                        if not os.path.isdir(duplicate_log_path):
                            os.makedirs(duplicate_log_path)
                        shutil.move(uploader.log_rootpath(duplicate),
                                    duplicate_log_path)
                    print("")
                    print("Done moving duplicate images to {}".format(
                        duplicate_path))
            if move_uploaded:
                if not len(uploaded_files):
                    print("")
                    print(
                        "There are no successfuly uploaded images in the specified import path.")
                else:
                    upload_success_path = os.path.join(
                        final_split_path, "upload_success")

                    if not os.path.isdir(upload_success_path):
                        os.makedirs(upload_success_path)

                    for uploaded in uploaded_files:
                        uploaded_image_path = os.path.join(
                            upload_success_path, os.path.basename(uploaded))
                        # BUG FIX: originally renamed onto the directory path
                        # (upload_success_path) instead of the per-image
                        # destination.
                        os.rename(uploaded, uploaded_image_path)
                        uploaded_log_path = os.path.dirname(uploader.log_rootpath(
                            uploaded_image_path))
                        if not os.path.isdir(uploaded_log_path):
                            os.makedirs(uploaded_log_path)
                        shutil.move(uploader.log_rootpath(uploaded),
                                    uploaded_log_path)
                    print("")
                    print("Done moving successfully uploaded images to {}".format(
                        upload_success_path))
# Beispiel #35
# 0
def failed_process(file_path, process):
    """Check the image log for a failure flag of the given process step."""
    root = uploader.log_rootpath(file_path)
    flag_name = "{}_failed".format(process)
    return os.path.isfile(os.path.join(root, flag_name))
def process_upload_params(import_path,
                          user_name,
                          master_upload=False,
                          verbose=False,
                          rerun=False,
                          skip_subfolders=False,
                          video_import_path=None):
    """Compute and log per-image upload parameters for an import path.

    Authenticates `user_name` (unless `master_upload`), then, for every
    image still requiring the "upload_params_process" step, writes the
    upload parameters (tokens/hashes/user key) into the image's mapillary
    log and flags the image for manual upload.  Images flagged as
    duplicates are skipped; a master upload skips parameter generation
    entirely.

    Exits the process with status 1 on an invalid path, a missing user
    name, or failed authentication.
    """

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # sampled frames live in a fixed subdirectory
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # get list of file to process
    process_file_list = processing.get_process_file_list(import_path,
                                                         "upload_params_process",
                                                         rerun,
                                                         verbose,
                                                         skip_subfolders)
    if not len(process_file_list):
        print("No images to run upload params process")
        print("If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")

    # sanity checks
    if not user_name:
        print_error("Error, must provide a valid user name, exiting...")
        # BUG FIX: a missing comma made the two string literals concatenate
        # into the single argument "upload_params_processfailed", so the
        # failure was logged under a non-existent process name (same fix in
        # the two authentication-failure branches below).
        processing.create_and_log_process_in_list(process_file_list,
                                                  "upload_params_process",
                                                  "failed",
                                                  verbose)
        sys.exit(1)

    if not master_upload:
        try:
            credentials = uploader.authenticate_user(user_name)
        # was a bare `except:`; narrow to Exception so KeyboardInterrupt
        # and SystemExit still propagate
        except Exception:
            print_error(
                "Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      "upload_params_process",
                                                      "failed",
                                                      verbose)
            sys.exit(1)
        # `is None` instead of `== None` (identity check for the singleton)
        if credentials is None or "user_upload_token" not in credentials or "user_permission_hash" not in credentials or "user_signature_hash" not in credentials:
            print_error(
                "Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      "upload_params_process",
                                                      "failed",
                                                      verbose)
            sys.exit(1)

        user_upload_token = credentials["user_upload_token"]
        user_permission_hash = credentials["user_permission_hash"]
        user_signature_hash = credentials["user_signature_hash"]
        user_key = credentials["MAPSettingsUserKey"]

    for image in tqdm(process_file_list, desc="Processing image upload parameters"):

        # check the status of the sequence processing
        log_root = uploader.log_rootpath(image)
        duplicate_flag_path = os.path.join(log_root,
                                           "duplicate")
        upload_params_path = os.path.join(
            log_root, "upload_params_process.json")

        # drop any stale parameters before deciding whether to regenerate
        if os.path.isfile(upload_params_path):
            os.remove(upload_params_path)

        # duplicates never get upload params; master uploads skip them too
        # (note: when master_upload is set, the user_* variables above are
        # intentionally never referenced)
        if os.path.isfile(duplicate_flag_path) or master_upload:
            continue

        upload_params_properties = processing.get_upload_param_properties(log_root,
                                                                          image,
                                                                          user_name,
                                                                          user_upload_token,
                                                                          user_permission_hash,
                                                                          user_signature_hash,
                                                                          user_key,
                                                                          verbose)
        processing.create_and_log_process(image,
                                          "upload_params_process",
                                          "success",
                                          upload_params_properties,
                                          verbose=verbose)
        # flag manual upload
        log_manual_upload = os.path.join(
            log_root, "manual_upload")
        open(log_manual_upload, 'a').close()

    print("Sub process ended")
# Beispiel #37
# 0
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False,
                   keep_original=False,
                   video_import_path=None,
                   overwrite_all_EXIF_tags=False,
                   overwrite_EXIF_time_tag=False,
                   overwrite_EXIF_gps_tag=False,
                   overwrite_EXIF_direction_tag=False,
                   overwrite_EXIF_orientation_tag=False):
    """Assemble the final mapillary image description for every processed
    image and (unless skipped) insert it into the image EXIF.

    Images flagged as duplicates are left untouched.  The result of this
    finalization step is logged per image under the
    "mapillary_image_description" process.  Exits with status 1 on an
    invalid video or import path.
    """
    # a video path, when given, has to exist
    if video_import_path and not os.path.isdir(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # video imports are sampled into a dedicated subdirectory; redirect
    # the import path there
    if video_import_path:
        sampled_frames_dir = "mapillary_sampled_video_frames"
        base_dir = os.path.abspath(import_path) if import_path else os.path.abspath(
            video_import_path)
        import_path = os.path.join(base_dir, sampled_frames_dir)

    # the (possibly adjusted) import path has to exist as well
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # images still awaiting the finalization step
    images_to_finalize = processing.get_process_file_list(
        import_path, "mapillary_image_description", rerun, verbose,
        skip_subfolders)
    if not len(images_to_finalize):
        print("No images to run process finalization")
        print(
            "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun"
        )

    progress = tqdm(images_to_finalize,
                    desc="Inserting mapillary image description in image EXIF")
    for image in progress:
        log_root = uploader.log_rootpath(image)

        # duplicates are skipped entirely
        if os.path.isfile(os.path.join(log_root, "duplicate")):
            continue

        description = processing.get_final_mapillary_image_description(
            log_root, image, master_upload, verbose, skip_EXIF_insert,
            keep_original, overwrite_all_EXIF_tags, overwrite_EXIF_time_tag,
            overwrite_EXIF_gps_tag, overwrite_EXIF_direction_tag,
            overwrite_EXIF_orientation_tag)

        processing.create_and_log_process(image,
                                          "mapillary_image_description",
                                          "success",
                                          description,
                                          verbose=verbose)

    print("Sub process ended")
def process_upload_params(import_path,
                          user_name,
                          master_upload=False,
                          verbose=False,
                          rerun=False,
                          skip_subfolders=False):
    """Prepare and log upload parameters for every image pending upload.

    Unless master_upload is set, user_name is authenticated and the
    resulting credentials are baked into a per-image
    "upload_params_process" log entry; images flagged as duplicates are
    skipped. Exits the process if import_path is not a directory.
    """
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(import_path,
                                                         "upload_params_process",
                                                         rerun,
                                                         verbose,
                                                         skip_subfolders)
    if not len(process_file_list):
        print("No images to run upload params process")
        print("If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")

    # sanity checks
    if not user_name:
        print("Error, must provide a valid user name, exiting...")
        # fixed: a missing comma used to merge the process name and status
        # into the single string "upload_params_processfailed"
        processing.create_and_log_process_in_list(process_file_list,
                                                  import_path,
                                                  "upload_params_process",
                                                  "failed",
                                                  verbose)
        return

    if not master_upload:
        try:
            credentials = uploader.authenticate_user(user_name)
        except Exception:  # narrowed from a bare except (kept SystemExit/KeyboardInterrupt alive)
            print("Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      import_path,
                                                      "upload_params_process",
                                                      "failed",
                                                      verbose)
            return
        if credentials is None or "user_upload_token" not in credentials or "user_permission_hash" not in credentials or "user_signature_hash" not in credentials:
            print("Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(process_file_list,
                                                      import_path,
                                                      "upload_params_process",
                                                      "failed",
                                                      verbose)
            return

        user_upload_token = credentials["user_upload_token"]
        user_permission_hash = credentials["user_permission_hash"]
        user_signature_hash = credentials["user_signature_hash"]
        user_key = credentials["MAPSettingsUserKey"]

    for image in process_file_list:
        # check the status of the sequence processing
        log_root = uploader.log_rootpath(import_path,
                                         image)
        duplicate_flag_path = os.path.join(log_root,
                                           "duplicate")
        upload_params_path = os.path.join(
            log_root, "upload_params_process.json")

        # always drop stale upload params before (re)creating them
        if os.path.isfile(upload_params_path):
            os.remove(upload_params_path)

        # duplicates and master uploads carry no per-user upload params
        if os.path.isfile(duplicate_flag_path) or master_upload:
            continue

        upload_params_properties = processing.get_upload_param_properties(log_root,
                                                                          image,
                                                                          user_name,
                                                                          user_upload_token,
                                                                          user_permission_hash,
                                                                          user_signature_hash,
                                                                          user_key,
                                                                          verbose)
        processing.create_and_log_process(image,
                                          import_path,
                                          "upload_params_process",
                                          "success",
                                          upload_params_properties,
                                          verbose=verbose)
        # flag manual upload
        log_manual_upload = os.path.join(
            log_root, "manual_upload")
        open(log_manual_upload, 'a').close()
Beispiel #39
0
def upload(import_path, manual_done=False, verbose=False, skip_subfolders=False):
    '''
    Upload local images to Mapillary
    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        manual_done: Prompt user to confirm upload finalization.

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.
    '''

    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        # fixed typo: message previously read "doesnt not exist"
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    total_file_list = uploader.get_total_file_list(
        import_path, skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(
        import_path, skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
        sys.exit()

    # optionally retry earlier failures
    if len(failed_file_list):
        upload_failed = raw_input(
            "Retry uploading previously failed image uploads? [y/n]: ")
        # if yes, add images to the upload list
        if upload_failed in ["y", "Y", "yes", "Yes"]:
            upload_file_list.extend(failed_file_list)

    # verify the images in the upload list, they need to have the image
    # description and certain MAP properties
    upload_file_list = [f for f in upload_file_list if verify_mapillary_tag(f)]

    if not len(upload_file_list):
        print("No images to upload.")
        print('Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them')
        sys.exit()

    # get upload params; only images with an on-disk
    # upload_params_process.json contribute an entry
    params = {}
    for image in total_file_list:
        log_root = uploader.log_rootpath(import_path, image)
        upload_params_path = os.path.join(
            log_root, "upload_params_process.json")
        if os.path.isfile(upload_params_path):
            with open(upload_params_path, "rb") as jf:
                params[image] = json.load(
                    jf, object_hook=uploader.ascii_encode_dict)

    # inform how many images are to be uploaded and how many are being skipped
    # from upload
    print("Uploading {} images with valid mapillary tags (Skipping {})".format(
        len(upload_file_list), len(total_file_list) - len(upload_file_list)))

    # call the actual upload, passing the list of images, the root of the
    # import and the upload params
    uploader.upload_file_list(upload_file_list, import_path, params)

    # finalize manual uploads if necessary
    finalize_file_list = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    # if manual uploads a DONE file needs to be uploaded to let the harvester
    # know the sequence is done uploading
    if len(finalize_file_list):
        finalize_all = 1
        if manual_done:
            finalize_all = uploader.prompt_to_finalize("uploads")
        if finalize_all:
            # get the s3 locations of the sequences
            finalize_params = uploader.process_upload_finalization(
                finalize_file_list, params)
            uploader.finalize_upload(finalize_params)
            # flag finalization for each file
            uploader.flag_finalization(import_path, finalize_file_list)
        else:
            print("Uploads will not be finalized.")
            print("If you wish to finalize your uploads, run the upload tool again.")
            sys.exit()

    uploader.print_summary(upload_file_list)
def insert_MAPJson(import_path,
                   master_upload=False,
                   verbose=False,
                   rerun=False,
                   skip_subfolders=False,
                   skip_EXIF_insert=False,
                   keep_original=False,
                   video_import_path=None,
                   overwrite_all_EXIF_tags=False,
                   overwrite_EXIF_time_tag=False,
                   overwrite_EXIF_gps_tag=False,
                   overwrite_EXIF_direction_tag=False,
                   overwrite_EXIF_orientation_tag=False):
    """Embed the final mapillary image description into each image's EXIF.

    Images flagged as duplicates are skipped; every processed image gets a
    "mapillary_image_description" success log entry.
    """

    # a video path, when given, must exist as either a directory or a file
    if video_import_path and not (os.path.isdir(video_import_path) or
                                  os.path.isfile(video_import_path)):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # when processing sampled video frames, the effective import path is
    # nested under the sampling folder
    if video_import_path:
        video_sampling_path = "mapillary_sampled_video_frames"
        if os.path.isdir(video_import_path):
            video_dirname = video_import_path
        else:
            video_dirname = os.path.dirname(video_import_path)
        if import_path:
            import_path = os.path.join(
                os.path.abspath(import_path), video_sampling_path)
        else:
            import_path = os.path.join(
                os.path.abspath(video_dirname), video_sampling_path)

    # the import directory itself must exist
    if not import_path or not os.path.isdir(import_path):
        print_error("Error, import directory " + import_path +
                    " does not exist, exiting...")
        sys.exit(1)

    # collect the images still pending this processing step
    process_file_list = processing.get_process_file_list(
        import_path, "mapillary_image_description", rerun, verbose,
        skip_subfolders)
    if not len(process_file_list):
        print("No images to run process finalization")
        print("If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")

    for image in tqdm(process_file_list, desc="Inserting mapillary image description in image EXIF"):
        # consult the processing logs; duplicates never make it into EXIF
        log_root = uploader.log_rootpath(image)
        if os.path.isfile(os.path.join(log_root, "duplicate")):
            continue

        final_mapillary_image_description = processing.get_final_mapillary_image_description(
            log_root, image, master_upload, verbose, skip_EXIF_insert,
            keep_original, overwrite_all_EXIF_tags, overwrite_EXIF_time_tag,
            overwrite_EXIF_gps_tag, overwrite_EXIF_direction_tag,
            overwrite_EXIF_orientation_tag)

        processing.create_and_log_process(
            image, "mapillary_image_description", "success",
            final_mapillary_image_description, verbose=verbose)

    print("Sub process ended")
def process_upload_params(import_path,
                          user_name,
                          master_upload=False,
                          verbose=False,
                          rerun=False,
                          skip_subfolders=False):
    """Prepare and log upload parameters for every image pending upload.

    Unless master_upload is set, user_name is authenticated and the
    resulting credentials are baked into a per-image
    "upload_params_process" log entry; images flagged as duplicates are
    skipped. Exits the process if import_path is not a directory.
    """
    # basic check for all
    import_path = os.path.abspath(import_path)
    if not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit()

    # get list of file to process
    process_file_list = processing.get_process_file_list(
        import_path, "upload_params_process", rerun, verbose, skip_subfolders)
    if not len(process_file_list):
        print("No images to run upload params process")
        print(
            "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun"
        )

    # sanity checks
    if not user_name:
        print("Error, must provide a valid user name, exiting...")
        # fixed: a missing comma used to merge the process name and status
        # into the single string "upload_params_processfailed"
        processing.create_and_log_process_in_list(
            process_file_list, "upload_params_process", "failed", verbose)
        return

    if not master_upload:
        try:
            credentials = uploader.authenticate_user(user_name)
        except Exception:  # narrowed from a bare except
            print("Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(
                process_file_list, "upload_params_process", "failed", verbose)
            return
        if credentials is None or "user_upload_token" not in credentials or "user_permission_hash" not in credentials or "user_signature_hash" not in credentials:
            print("Error, user authentication failed for user " + user_name)
            processing.create_and_log_process_in_list(
                process_file_list, "upload_params_process", "failed", verbose)
            return

        user_upload_token = credentials["user_upload_token"]
        user_permission_hash = credentials["user_permission_hash"]
        user_signature_hash = credentials["user_signature_hash"]
        user_key = credentials["MAPSettingsUserKey"]

    for image in process_file_list:
        # check the status of the sequence processing
        log_root = uploader.log_rootpath(image)
        duplicate_flag_path = os.path.join(log_root, "duplicate")
        upload_params_path = os.path.join(log_root,
                                          "upload_params_process.json")

        # always drop stale upload params before (re)creating them
        if os.path.isfile(upload_params_path):
            os.remove(upload_params_path)

        # duplicates and master uploads carry no per-user upload params
        if os.path.isfile(duplicate_flag_path) or master_upload:
            continue

        upload_params_properties = processing.get_upload_param_properties(
            log_root, image, user_name, user_upload_token,
            user_permission_hash, user_signature_hash, user_key, verbose)
        processing.create_and_log_process(image,
                                          "upload_params_process",
                                          "success",
                                          upload_params_properties,
                                          verbose=verbose)
        # flag manual upload
        log_manual_upload = os.path.join(log_root, "manual_upload")
        open(log_manual_upload, 'a').close()
def post_process(import_path,
                 split_import_path=None,
                 video_import_path=None,
                 summarize=False,
                 move_all_images=False,
                 move_duplicates=False,
                 move_uploaded=False,
                 move_sequences=False,
                 save_as_json=False,
                 list_file_status=False,
                 push_images=False,
                 skip_subfolders=False,
                 verbose=False,
                 save_local_mapping=False):
    """Run the selected post processing actions over an import directory.

    Actions include summarizing upload/process logs, listing per-file
    status, pushing manually uploaded images, saving a local mapping csv,
    and moving images (duplicates, uploaded, sequences) under
    split_import_path. Exits if import_path is invalid.
    """

    # return if nothing specified
    if not any([summarize, move_all_images, list_file_status, push_images, move_duplicates, move_uploaded, save_local_mapping, move_sequences]):
        print("No post processing action specified.")
        return

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path) and not os.path.isfile(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)
    # move_all_images implies all three move flags
    if move_all_images:
        move_sequences = True
        move_duplicates = True
        move_uploaded = True
    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)
    if save_local_mapping:
        # The boolean flag shadows the module level save_local_mapping()
        # helper, so resolve the helper through globals(); previously this
        # called the bool itself (TypeError) and then referenced an
        # undefined local_mapping_filepath (NameError). The csv path
        # mirrors the one the helper itself derives.
        local_mapping = globals()["save_local_mapping"](import_path)
        local_mapping_filepath = os.path.join(os.path.dirname(
            import_path), import_path + "_mapillary_image_uuid_to_local_path_mapping.csv")
        with open(local_mapping_filepath, "w") as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            for row in local_mapping:
                csvwriter.writerow(row)
    if push_images:
        to_be_pushed_files = uploader.get_success_only_manual_upload_file_list(
            import_path, skip_subfolders)
        params = {}
        for image in tqdm(to_be_pushed_files, desc="Pushing images"):
            log_root = uploader.log_rootpath(image)
            upload_params_path = os.path.join(
                log_root, "upload_params_process.json")
            if os.path.isfile(upload_params_path):
                with open(upload_params_path, "rb") as jf:
                    params[image] = json.load(
                        jf, object_hook=uploader.ascii_encode_dict)

        # get the s3 locations of the sequences
        finalize_params = uploader.process_upload_finalization(
            to_be_pushed_files, params)
        uploader.finalize_upload(finalize_params)
        # flag finalization for each file
        uploader.flag_finalization(to_be_pushed_files)

    # gather only the log lists the requested actions actually need
    if any([summarize, list_file_status, move_uploaded]):
        # upload logs
        uploaded_files = uploader.get_success_upload_file_list(
            import_path, skip_subfolders)
        uploaded_files_count = len(uploaded_files)
        failed_upload_files = uploader.get_failed_upload_file_list(
            import_path, skip_subfolders)
        failed_upload_files_count = len(failed_upload_files)
        to_be_finalized_files = uploader.get_finalize_file_list(import_path)
        to_be_finalized_files_count = len(to_be_finalized_files)
        to_be_uploaded_files = uploader.get_upload_file_list(
            import_path, skip_subfolders)
        to_be_uploaded_files_count = len(to_be_uploaded_files)
    if any([summarize, move_sequences]):
        total_files = uploader.get_total_file_list(import_path)
        total_files_count = len(total_files)
    if any([summarize, move_duplicates, list_file_status]):
        duplicates_file_list = processing.get_duplicate_file_list(
            import_path, skip_subfolders)
        duplicates_file_list_count = len(duplicates_file_list)
    if summarize:
        summary_dict = {}
        summary_dict["total images"] = total_files_count
        summary_dict["upload summary"] = {
            "successfully uploaded": uploaded_files_count,
            "failed uploads": failed_upload_files_count,
            "uploaded to be finalized": to_be_finalized_files_count
        }
        # process logs: success/failed counts per processing step
        summary_dict["process summary"] = {}
        process_steps = ["user_process", "import_meta_process", "geotag_process",
                         "sequence_process", "upload_params_process", "mapillary_image_description"]
        for step in process_steps:

            process_success = len(processing.get_process_status_file_list(
                import_path, step, "success", skip_subfolders))
            process_failed = len(processing.get_process_status_file_list(
                import_path, step, "failed", skip_subfolders))
            summary_dict["process summary"][step] = {
                "failed": process_failed,
                "success": process_success
            }
        summary_dict["process summary"]["duplicates"] = duplicates_file_list_count
        summary_dict["process summary"]["processed_not_yet_uploaded"] = to_be_uploaded_files_count
        print("Import summary for import path {} :".format(import_path))
        print(json.dumps(summary_dict, indent=4))

        ipc.send('summary', summary_dict)

        if save_as_json:
            try:
                processing.save_json(summary_dict, os.path.join(
                    import_path, "mapillary_import_summary.json"))
            except Exception as e:
                print("Could not save summary into json at {}, due to {}".format(
                    os.path.join(import_path, "mapillary_import_summary.json"), e))
    if list_file_status:
        status_list_dict = {}
        status_list_dict["successfully uploaded"] = uploaded_files
        status_list_dict["failed uploads"] = failed_upload_files
        status_list_dict["uploaded to be finalized"] = to_be_finalized_files
        status_list_dict["duplicates"] = duplicates_file_list
        status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files
        print("")
        print("List of file status for import path {} :".format(import_path))
        print(json.dumps(status_list_dict, indent=4))
        if save_as_json:
            try:
                processing.save_json(status_list_dict, os.path.join(
                    import_path, "mapillary_import_image_status_list.json"))
            except Exception as e:
                print("Could not save image status list into json at {}, due to {}".format(
                    os.path.join(import_path, "mapillary_import_image_status_list.json"), e))
    # images are moved under split_import_path (defaults to import_path)
    split_import_path = split_import_path if split_import_path else import_path
    if any([move_sequences, move_duplicates, move_uploaded]):
        if not os.path.isdir(split_import_path):
            print("Split import path {} does not exist.".format(
                split_import_path))
            sys.exit(1)

    # map each image to the subfolder(s) it should be moved into
    destination_mapping = {}
    if move_duplicates:
        for image in duplicates_file_list:
            destination_mapping[image] = {"basic": ["duplicates"]}
    if move_uploaded:
        for image in uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("uploaded")
            else:
                destination_mapping[image] = {"basic": ["uploaded"]}
        for image in failed_upload_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("failed_upload")
            else:
                destination_mapping[image] = {"basic": ["failed_upload"]}
        for image in to_be_finalized_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append(
                    "uploaded_not_finalized")
            else:
                destination_mapping[image] = {
                    "basic": ["uploaded_not_finalized"]}
        for image in to_be_uploaded_files:
            if image in destination_mapping:
                destination_mapping[image]["basic"].append("to_be_uploaded")
            else:
                destination_mapping[image] = {"basic": ["to_be_uploaded"]}
    if move_sequences:
        destination_mapping = map_images_to_sequences(
            destination_mapping, total_files)
    # move each image (and its .mapillary logs) to its destination
    for image in destination_mapping:
        basic_destination = destination_mapping[image]["basic"] if "basic" in destination_mapping[image] else [
        ]
        sequence_destination = destination_mapping[image][
            "sequence"] if "sequence" in destination_mapping[image] else ""
        image_destination_path = os.path.join(*([split_import_path] + basic_destination + [
                                              os.path.dirname(image[len(os.path.abspath(import_path)) + 1:])] + [sequence_destination, os.path.basename(image)]))
        if not os.path.isdir(os.path.dirname(image_destination_path)):
            os.makedirs(os.path.dirname(image_destination_path))
        os.rename(image, image_destination_path)
        image_logs_dir = uploader.log_rootpath(image)
        destination_logs_dir = uploader.log_rootpath(image_destination_path)
        if not os.path.isdir(image_logs_dir):
            continue
        if not os.path.isdir(os.path.dirname(destination_logs_dir)):
            os.makedirs(os.path.dirname(destination_logs_dir))
        os.rename(image_logs_dir, destination_logs_dir)
Beispiel #43
0
def process_sequence_properties(import_path,
                                cutoff_distance=600.0,
                                cutoff_time=60.0,
                                interpolate_directions=False,
                                flag_duplicates=False,
                                duplicate_distance=0.1,
                                duplicate_angle=5,
                                offset_angle=0.0,
                                verbose=False,
                                rerun=False,
                                skip_subfolders=False,
                                video_import_path=None):
    """Split geotagged images into sequences and finalize their processing.

    Images are grouped into sequences by capture time and GPS distance
    (cutoff_time / cutoff_distance), compass directions are interpolated
    from consecutive positions where needed, near-identical consecutive
    frames can be flagged as duplicates, and each sequence is finalized in
    chunks of MAX_SEQUENCE_LENGTH under a freshly generated uuid.
    Exits the process if import_path (or video_import_path) is invalid.
    """

    # sanity check if video file is passed
    if video_import_path and not os.path.isdir(video_import_path):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        import_path = os.path.join(
            os.path.abspath(import_path),
            video_sampling_path) if import_path else os.path.join(
                os.path.abspath(video_import_path), video_sampling_path)

    # basic check for all
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory " + import_path +
              " does not exist, exiting...")
        sys.exit(1)

    sequences = []
    if skip_subfolders:
        # only consider images directly in the import root
        process_file_list = processing.get_process_file_list(
            import_path, "sequence_process", rerun, verbose, True, import_path)
        if not len(process_file_list):
            if verbose:
                print("No images to run sequence process in root " +
                      import_path)
                print(
                    "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun"
                )
        else:
            # LOAD TIME AND GPS POINTS ------------------------------------
            file_list, capture_times, lats, lons, directions = processing.load_geotag_points(
                process_file_list, verbose)
            # ---------------------------------------

            # SPLIT SEQUENCES --------------------------------------
            if len(capture_times) and len(lats) and len(lons):
                sequences.extend(
                    processing.split_sequences(capture_times, lats, lons,
                                               file_list, directions,
                                               cutoff_time, cutoff_distance,
                                               verbose))
        # ---------------------------------------
    else:
        # sequence limited to the root of the files: each directory level is
        # processed independently so a sequence never spans folders
        for root, dirs, files in os.walk(import_path):
            # skip the tool's own log directories
            if os.path.join(".mapillary", "logs") in root:
                continue
            if len(files):
                process_file_list = processing.get_process_file_list(
                    import_path, "sequence_process", rerun, verbose, True,
                    root)
                if not len(process_file_list):
                    if verbose:
                        print("No images to run sequence process in root " +
                              root)
                        print(
                            "If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun"
                        )
                    continue
                # LOAD TIME AND GPS POINTS ------------------------------------
                file_list, capture_times, lats, lons, directions = processing.load_geotag_points(
                    process_file_list, verbose)
                # ---------------------------------------
                # SPLIT SEQUENCES --------------------------------------
                if len(capture_times) and len(lats) and len(lons):
                    sequences.extend(
                        processing.split_sequences(capture_times, lats, lons,
                                                   file_list, directions,
                                                   cutoff_time,
                                                   cutoff_distance, verbose))
                # ---------------------------------------
    if flag_duplicates:
        if verbose:
            print(
                "Flagging images as duplicates if consecutive distance difference less than {} and angle difference less than {}"
                .format(duplicate_distance, duplicate_angle))

    # process for each sequence
    for sequence in sequences:
        file_list = sequence["file_list"]
        directions = sequence["directions"]
        latlons = sequence["latlons"]
        capture_times = sequence["capture_times"]

        # COMPUTE DIRECTIONS --------------------------------------
        # bearing between each consecutive latlon pair; the last image
        # reuses the bearing of the pair before it
        interpolated_directions = [
            compute_bearing(ll1[0], ll1[1], ll2[0], ll2[1])
            for ll1, ll2 in zip(latlons[:-1], latlons[1:])
        ]
        if len(interpolated_directions):
            interpolated_directions.append(interpolated_directions[-1])
        else:
            # NOTE(review): single-image sequence falls back to the stored
            # direction; raises IndexError if directions is empty — confirm
            # sequences always carry at least one direction entry
            interpolated_directions.append(directions[-1])
        # use interpolated directions if direction not available or if flag for
        # interpolate_directions
        for i, d in enumerate(directions):
            directions[i] = d if (
                d is not None and not interpolate_directions
            ) else (interpolated_directions[i] + offset_angle) % 360.0
        # ---------------------------------------

        # INTERPOLATE TIMESTAMPS, in case of identical timestamps
        capture_times = processing.interpolate_timestamp(capture_times)

        final_file_list = file_list[:]
        final_directions = directions[:]
        final_capture_times = capture_times[:]
        # FLAG DUPLICATES --------------------------------------
        # walk the sequence keeping the first image; any image closer than
        # duplicate_distance AND turned less than duplicate_angle from the
        # last kept image is flagged and logged as already processed
        if flag_duplicates:
            final_file_list = [file_list[0]]
            final_directions = [directions[0]]
            final_capture_times = [capture_times[0]]
            prev_latlon = latlons[0]
            prev_direction = directions[0]
            for i, filename in enumerate(file_list[1:]):
                log_root = uploader.log_rootpath(filename)
                duplicate_flag_path = os.path.join(log_root, "duplicate")
                sequence_process_success_path = os.path.join(
                    log_root, "sequence_process_success")
                # enumerate starts at 0 but file_list[1:] is offset by one
                k = i + 1
                distance = gps_distance(latlons[k], prev_latlon)
                if directions[k] is not None and prev_direction is not None:
                    direction_diff = diff_bearing(directions[k],
                                                  prev_direction)
                else:
                    # dont use bearing difference if no bearings are
                    # available
                    direction_diff = 360
                if distance < duplicate_distance and direction_diff < duplicate_angle:
                    # flag as duplicate and mark sequence processing done
                    # (plain flag plus a timestamped flag file)
                    open(duplicate_flag_path, "w").close()
                    open(sequence_process_success_path, "w").close()
                    open(
                        sequence_process_success_path + "_" +
                        str(time.strftime("%Y_%m_%d_%H_%M_%S", time.gmtime())),
                        "w").close()
                else:
                    # keep this image and use it as the new comparison base
                    prev_latlon = latlons[k]
                    prev_direction = directions[k]
                    final_file_list.append(filename)
                    final_directions.append(directions[k])
                    final_capture_times.append(capture_times[k])
        # ---------------------------------------

        # FINALIZE ------------------------------------
        # finalize in chunks so no uploaded sequence exceeds
        # MAX_SEQUENCE_LENGTH images; each chunk gets its own uuid
        for i in range(0, len(final_file_list), MAX_SEQUENCE_LENGTH):
            finalize_sequence_processing(
                str(uuid.uuid4()), final_file_list[i:i + MAX_SEQUENCE_LENGTH],
                final_directions[i:i + MAX_SEQUENCE_LENGTH],
                final_capture_times[i:i + MAX_SEQUENCE_LENGTH], import_path,
                verbose)
    print("Sub process ended")
Beispiel #44
0
def _load_upload_params(image):
    # Read the per-image upload params written by the upload_params
    # processing step. Returns the parsed dict, or None if the image has
    # no such file (i.e. it is a direct-upload image).
    log_root = uploader.log_rootpath(image)
    upload_params_path = os.path.join(
        log_root, "upload_params_process.json")
    if not os.path.isfile(upload_params_path):
        return None
    with open(upload_params_path, "rb") as jf:
        return json.load(jf, object_hook=uploader.ascii_encode_dict)


def upload(import_path, verbose=False, skip_subfolders=False, number_threads=None, max_attempts=None, video_import_path=None, dry_run=False, api_version=1.0):
    '''
    Upload local images to Mapillary

    Args:
        import_path: Directory path to where the images are stored.
        verbose: Print extra warnings and errors.
        skip_subfolders: Skip images stored in subdirectories.
        number_threads: Number of upload threads (None = uploader default).
        max_attempts: Max upload retries per image (None = uploader default).
        video_import_path: Optional video file or directory; when set, the
            effective import path becomes the sampled-frames directory
            derived from it.
        dry_run: Accepted for interface compatibility; not used in this
            function.
        api_version: Upload API version; only 1.0 is implemented here.

    Returns:
        Images are uploaded to Mapillary and flagged locally as uploaded.

    Raises:
        NotImplementedError: if api_version is 2.0 (no v2 upload path yet).
    '''
    # sanity check if video file is passed
    if video_import_path and (not os.path.isdir(video_import_path) and not os.path.isfile(video_import_path)):
        print("Error, video path " + video_import_path +
              " does not exist, exiting...")
        sys.exit(1)

    # in case of video processing, adjust the import path
    if video_import_path:
        # set sampling path
        video_sampling_path = "mapillary_sampled_video_frames"
        video_dirname = video_import_path if os.path.isdir(
            video_import_path) else os.path.dirname(video_import_path)
        import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join(
            os.path.abspath(video_dirname), video_sampling_path)

    # basic check for all; use format() so a None import_path prints
    # "None" instead of raising TypeError on string concatenation
    if not import_path or not os.path.isdir(import_path):
        print("Error, import directory {} does not exist, exiting...".format(
            import_path))
        sys.exit(1)

    # get lists of files to process, bucketed by their upload state
    total_file_list = uploader.get_total_file_list(
        import_path, skip_subfolders)
    upload_file_list = uploader.get_upload_file_list(
        import_path, skip_subfolders)
    failed_file_list = uploader.get_failed_upload_file_list(
        import_path, skip_subfolders)
    success_file_list = uploader.get_success_upload_file_list(
        import_path, skip_subfolders)
    to_finalize_file_list = uploader.get_finalize_file_list(
        import_path, skip_subfolders)

    if len(success_file_list) == len(total_file_list):
        print("All images have already been uploaded")
    else:
        if len(failed_file_list):
            # under IPC there is no interactive console; retry by default
            upload_failed = raw_input(
                "Retry uploading previously failed image uploads? [y/n]: ") if not ipc.is_enabled() else 'y'
            # if yes, add images to the upload list
            if upload_failed in ["y", "Y", "yes", "Yes"]:
                upload_file_list.extend(failed_file_list)

        # verify the images in the upload list, they need to have the image
        # description and certain MAP properties
        upload_file_list = [
            f for f in upload_file_list if verify_mapillary_tag(f)]

        if not len(upload_file_list) and not len(to_finalize_file_list):
            print("No images to upload.")
            print('Please check if all images contain the required Mapillary metadata. If not, you can use "mapillary_tools process" to add them')
            sys.exit(1)

        if len(upload_file_list):
            # get upload params for the manual upload images, group them per sequence
            # and separate direct upload images
            params = {}
            list_per_sequence_mapping = {}
            direct_upload_file_list = []
            for image in upload_file_list:
                image_params = _load_upload_params(image)
                if image_params is None:
                    direct_upload_file_list.append(image)
                else:
                    params[image] = image_params
                    sequence = image_params["key"]
                    list_per_sequence_mapping.setdefault(
                        sequence, []).append(image)

            # inform how many images are to be uploaded and how many are being skipped
            # from upload
            print("Uploading {} images with valid mapillary tags (Skipping {})".format(
                len(upload_file_list), len(total_file_list) - len(upload_file_list)))
            if api_version == 2.0:
                # The original code referenced an undefined, misspelled name
                # ("uploder.uploadfile_list") here, which crashed with a
                # NameError. Fail with an explicit, descriptive error until
                # a v2 upload path exists.
                raise NotImplementedError(
                    "Upload API version 2.0 is not implemented")
            if len(direct_upload_file_list):
                uploader.upload_file_list_direct(
                    direct_upload_file_list, number_threads, max_attempts)
            for idx, sequence in enumerate(list_per_sequence_mapping):
                uploader.upload_file_list_manual(
                    list_per_sequence_mapping[sequence], params, idx, number_threads, max_attempts)
        if len(to_finalize_file_list):
            # finalize each distinct sequence exactly once
            params = {}
            sequences = []
            for image in to_finalize_file_list:
                image_params = _load_upload_params(image)
                if image_params is not None:
                    sequence = image_params["key"]
                    if sequence not in sequences:
                        params[image] = image_params
                        sequences.append(sequence)
            for image in params:
                uploader.upload_done_file(**params[image])
            uploader.flag_finalization(to_finalize_file_list)

    uploader.print_summary(upload_file_list)