Example #1
def remove_blacklisted_paths(google):
    """Removes archived blacklisted paths from Google Drive and the database."""
    
    print("Deleting blacklisted files from Google Drive ...")
    
    db = database.GoogleDriveDB()
    archives = list(get_blacklisted_archives())
    for archive in progressbar.progressbar(archives):
        google.delete(archive.drive_id)
        logging.info("Removed {} ({}) from database and/or Google Drive.".format(archive.drive_id, archive.path))
        archive.delete_instance()
    db.close()
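`archive.delete_instance()` is peewee's row-delete method, so the archives iterated above are presumably ORM rows carrying `drive_id` and `path` fields. Below is a hypothetical sketch of such a model, inferred only from the attributes these examples access; the `Archive` name, the field types, and the database file are assumptions, not the project's actual schema.

import peewee

_db = peewee.SqliteDatabase("google_drive.db")  # hypothetical database file

class Archive(peewee.Model):
    """Hypothetical model matching the attributes used in these examples."""
    drive_id = peewee.CharField(unique=True)  # id of the file on Google Drive
    path = peewee.CharField()                 # original path on the local disk

    class Meta:
        database = _db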
Example #2
def delete_all_removed_from_local_db(google):
    """:WARNING: Deletes files that no longer exist on the local disk from Google Drive and the database."""

    print("Deleting files removed from disk from Google Drive ...")
    
    # 403 errors are the enemy. There is nothing that can be done.
    # The officially recommended strategy, exponential backoff, doesn't work.
    # Requests must be intentionally throttled to avoid getting stuck in a 403 loop.
    # For that reason, this function is single-threaded and unbatched.

    db = database.GoogleDriveDB()
    archives = list(get_all_removed_from_local_db())
    for archive in progressbar.progressbar(archives):
        google.delete(archive.drive_id)
        logging.info("Removed {} ({}) from database and/or Google Drive.".format(archive.drive_id, archive.path))
        archive.delete_instance()
    db.close()
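The comment in Example #2 says requests must be deliberately throttled rather than retried with exponential backoff; the function achieves this implicitly by issuing serial, unbatched deletes. A minimal sketch of the same loop with an explicit per-request delay follows; the `delay` parameter and its 0.1-second default are illustrative assumptions, not values from the project.

import time

def delete_all_removed_from_local_db_throttled(google, delay=0.1):
    """Sketch: same as delete_all_removed_from_local_db, but with an explicit
    pause between Drive requests. The delay value is an assumption."""
    db = database.GoogleDriveDB()
    archives = list(get_all_removed_from_local_db())
    for archive in progressbar.progressbar(archives):
        google.delete(archive.drive_id)
        archive.delete_instance()
        time.sleep(delay)  # deliberate throttle to stay clear of 403 rate limits
    db.close()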
Example #3
def delete_all_removed_from_local_db_batched(google):
    """:WARNING: Deletes files that no longer exist on the local disk from Google Drive and the database."""

    print("\rDeleting files removed from disk from Google Drive ...")    
    
    RETRY_LIMIT = 5

    db = database.GoogleDriveDB()

    ids = { rem.drive_id for rem in get_all_removed_from_local_db() }
    retry_ids = set()
    retry_count = 0
    pbar = progressbar.progressbar(total=len(ids))

    def _batch_delete_callback(file_id, _, exception):
        nonlocal retry_count
        if exception is not None and exception.resp.status != 404:
            if exception.resp.status == 403:  # Rate limit exceeded (probably).
                if retry_count >= RETRY_LIMIT:
                    raise exception
                logging.warning("RETRYING:" + repr(exception))
                retry_ids.add(file_id)
                time.sleep(2**retry_count)
                retry_count += 1
            else:
                raise exception
        else:
            if exception is not None and exception.resp.status == 404:  # File does not exist.
                logging.warning("IGNORING: " + repr(exception))
            retry_count = 0
            archive = db.get("drive_id", file_id)
            pbar.update()
            logging.info("Removed {} ({}) from database and/or Google Drive.".format(archive.drive_id, archive.path))
            archive.delete_instance()
    
    google.batch_delete(ids, callback=_batch_delete_callback)
    while len(retry_ids) > 0:
        ids = set(retry_ids)
        retry_ids.clear()
        google.batch_delete(ids, callback=_batch_delete_callback)
    db.close()
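The callback in Example #3 takes `(file_id, _, exception)`, which matches the `(request_id, response, exception)` callback signature of `googleapiclient`'s batch HTTP requests, so `google.batch_delete` is presumably layered on the Drive v3 client's `new_batch_http_request`. A minimal sketch of such a helper under that assumption; the function name, the `service` argument, and the chunking are not taken from the project (100 is the documented per-batch request limit for the Drive API).

def batch_delete(service, file_ids, callback, chunk_size=100):
    """Sketch of a batch-delete helper over a Drive v3 service object built
    with googleapiclient.discovery.build("drive", "v3", ...)."""
    file_ids = list(file_ids)
    for start in range(0, len(file_ids), chunk_size):
        batch = service.new_batch_http_request(callback=callback)
        for file_id in file_ids[start:start + chunk_size]:
            # request_id is echoed back as the callback's first argument,
            # which is how _batch_delete_callback receives the Drive file id.
            batch.add(service.files().delete(fileId=file_id), request_id=file_id)
        batch.execute()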
Example #4
def test_progress():
    with progressbar.progressbar(desc="Progress test") as p:
        for step in range(500):
            p.set_progress(pow(step / 500, 3))
            time.sleep(2 / 500)
Example #5
def test_length():
    for i in progressbar.progressbar(range(10), desc="A" * 999, max_width=100):
        fuzz()
Example #6
def test_manual():
    N = 20
    with progressbar.progressbar(total=N, desc="Manual test", show_time=False) as p:
        for i in range(N):
            fuzz()
            p.update()
Example #7
def test_dummy():
    with progressbar.progressbar(desc="Dummy test", show_time=False, bar_width=8) as p:
        for i in range(20):
            fuzz()
            p.update()
Example #8
def test_iterator():
    iterable = list(range(20))
    for i in progressbar.progressbar(iterable, desc="Iterable test"):
        fuzz()