Example No. 1
def test_removed_changes():
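    # Print every change the Drive crawler reports as removed, without updating the change token.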
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    crawler = filecrawler.DriveFileCrawler(conf, googledrive.GoogleDrive())

    for change in crawler.get_last_removed(update_token=False):
        print(change)

    conf.exit()
Example No. 2
def get_blacklisted_archives():
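    # Yield every archive in the local Drive database whose path matches a blacklisted parent
    # of the configured sync directories.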
    SETTINGS_FILE = "_settings.ini"
    DATA_FILE = "_backuper.ini"
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    with database.GoogleDriveDB() as db:
        for archive in db.model.select().iterator():
            if conf.is_blacklisted_parent(archive.path, conf.sync_dirs):
                yield archive
Example No. 3
def test_drivecrawler_folder(folder_id):
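    # Print every object the crawler marks for download inside the given Drive folder.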
    db = database.GoogleDriveDB()
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    crawler = filecrawler.DriveFileCrawler(conf, googledrive.GoogleDrive())
    
    for obj in crawler.get_ids_to_download_in_folder(folder_id):
        print(obj)

    conf.exit()
    db.close()
Example No. 4
def test_localfilecrawler():
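    # Print the folders, files, and combined paths the local crawler would sync under tests/.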
    db = database.GoogleDriveDB()
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    crawler = filecrawler.LocalFileCrawler(conf)

    for p in crawler.get_folders_to_sync("tests/"):
        print(p)

    for p in crawler.get_files_to_sync("tests/"):
        print(p)

    for p in crawler.get_all_paths_to_sync("tests/"):
        print(p)

    conf.exit()
    db.close()
Example No. 5
def test_drivecrawler_changes():
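    # Seed the data file with a fixed sync time and change token, then print the changes
    # the crawler wants to download.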
    db = database.GoogleDriveDB()
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    g = googledrive.GoogleDrive()
    crawler = filecrawler.DriveFileCrawler(conf, g)
    
    change_date = datetime.datetime(2019, 5, 20)
    change_date = googledrive.convert_datetime_to_google_time(change_date)
    conf.data_file.set_last_download_sync_time(change_date)
    conf.data_file.set_last_download_change_token(989626)

    for obj in crawler.get_changes_to_download():
        print(obj)

    g.exit()
    conf.exit()
    db.close()
Example No. 6
def db_upload_test(path):
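    # Recreate the local folder structure on Drive, upload the files through a 4-thread queue,
    # then delete the remote folder and the matching database rows once the user confirms.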
    db = database.GoogleDriveDB()
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    google = googledrive.GoogleDrive()
    file_crawler = filecrawler.LocalFileCrawler(conf)

    drive_uploader = uploader.DBDriveUploader(google, update_db=True)
    folder_id = make_folder_structure(path, drive_uploader, file_crawler)

    q = drive_uploader.start_upload_queue(n_threads=4)
    for fpath in file_crawler.get_files_to_sync(path):
        q.put(drive_uploader.DUQEntry(fpath))
    drive_uploader.wait_for_queue(q)

    input("Press any key to clean up.")
    google.delete(folder_id)
    entry = database.unify_path(path)
    query = db.model.select().where(db.model.path.contains(entry))
    for archive in query.iterator():
        archive.delete_instance()

    conf.exit()
    db.close()
Example No. 7
import os
from backuper import settings

SETTINGS_FILE = "tests/test_settings.ini"
DATA_FILE = "tests/test_backuper.ini"

conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
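# Module-level Settings instance; print_settings_file and print_data_file read from it.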


def print_unified_paths(c, section, option):
    print("[{}]/[{}]".format(section, option))
    for path in c.get_unified_paths(section, option):
        print(path)


def print_ini(c):
    for section in c.sections():
        print("[{}]".format(section))
        for item in c.items(section):
            print(item)


def print_settings_file():
    print(conf.user_settings_file.file_path)
    print_ini(conf.user_settings_file)


def print_data_file():
    print(conf.data_file.file_path)
    print_ini(conf.data_file)