def test_removed_changes():
    """Print every change the crawler reports as removed since the last sync.

    ``update_token=False`` leaves the stored change token untouched, so the
    test can be re-run against the same change window.
    """
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    try:
        crawler = filecrawler.DriveFileCrawler(conf, googledrive.GoogleDrive())
        for change in crawler.get_last_removed(update_token=False):
            print(change)
    finally:
        # Always release settings resources, even if the crawl raises.
        conf.exit()
def test_drivecrawler_folder(folder_id):
    """Print the ids the crawler would download from the Drive folder *folder_id*.

    Opens the local DB and settings, walks the folder via
    ``get_ids_to_download_in_folder``, and prints each result.
    """
    db = database.GoogleDriveDB()
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    try:
        crawler = filecrawler.DriveFileCrawler(conf, googledrive.GoogleDrive())
        for obj in crawler.get_ids_to_download_in_folder(folder_id):
            print(obj)
    finally:
        # Close settings and DB even if the crawl raises.
        conf.exit()
        db.close()
def test_drivecrawler_changes():
    """Print the changes the crawler would download after a fixed sync point.

    Seeds the data file with a hard-coded sync time (2019-05-20) and change
    token (989626) so ``get_changes_to_download`` has a known starting state.
    """
    db = database.GoogleDriveDB()
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    g = googledrive.GoogleDrive()
    try:
        crawler = filecrawler.DriveFileCrawler(conf, g)
        change_date = datetime.datetime(2019, 5, 20)
        change_date = googledrive.convert_datetime_to_google_time(change_date)
        conf.data_file.set_last_download_sync_time(change_date)
        conf.data_file.set_last_download_change_token(989626)
        for obj in crawler.get_changes_to_download():
            print(obj)
    finally:
        # Tear down all three resources even if the crawl raises.
        g.exit()
        conf.exit()
        db.close()
def db_upload_test(path):
    """Upload *path* to Drive (recording rows in the DB), then clean up.

    Builds the remote folder structure, pushes every file through a
    4-thread upload queue, waits for completion, then — after a manual
    confirmation prompt — deletes the remote folder and every DB row
    whose path contains *path*.
    """
    db = database.GoogleDriveDB()
    conf = settings.Settings(SETTINGS_FILE, DATA_FILE)
    google = googledrive.GoogleDrive()
    try:
        file_crawler = filecrawler.LocalFileCrawler(conf)
        drive_uploader = uploader.DBDriveUploader(google, update_db=True)
        folder_id = make_folder_structure(path, drive_uploader, file_crawler)
        q = drive_uploader.start_upload_queue(n_threads=4)
        for fpath in file_crawler.get_files_to_sync(path):
            q.put(drive_uploader.DUQEntry(fpath))
        drive_uploader.wait_for_queue(q)
        input("Press any key to clean up.")
        # Cleanup phase: remove the remote folder, then purge matching DB rows.
        google.delete(folder_id)
        entry = database.unify_path(path)
        query = db.model.select().where(db.model.path.contains(entry))
        for archive in query.iterator():
            archive.delete_instance()
    finally:
        # Close settings and DB even if upload or cleanup raises.
        conf.exit()
        db.close()
import sys import os import tempfile import time import pprint from pytools import printer, filetools from backuper import googledrive g = googledrive.GoogleDrive() def test_changes(): import pprint # token = 820841 token = g.get_start_page_token() print("Start token: ", token) folder_id = g.upload_directory("tests/") g.delete(folder_id) changes = g.get_changes( start_page_token=token, fields= "changes(file(id, name, mimeType, md5Checksum, modifiedTime, trashed, parents), fileId, removed)", include_removed=True) for change in changes:
import os

from backuper import downloader
from backuper import googledrive


def dl_folder(google, folder_id, dest_path):
    """Download the Drive folder *folder_id* into the local directory *dest_path*.

    Walks the remote folder tree and feeds a 5-thread download queue with
    one entry per folder and one per file, then blocks until the queue
    drains.
    """
    drive_dl = downloader.DriveDownloader(google, update_db=False)
    make_entry = drive_dl.DLQEntry
    queue = drive_dl.start_download_queue(n_threads=5)
    walk = google.walk_folder(folder_id, fields="files(id, md5Checksum, name)")
    for dir_entry, _subdirs, file_entries in walk:
        local_dir = os.path.join(dest_path, dir_entry[0])
        # Queue the folder itself — really only necessary for empty folders.
        queue.put(make_entry(type="#folder", file_id=dir_entry[1], path=local_dir))
        for resp in file_entries:
            queue.put(make_entry(
                type="#file",
                file_id=resp['id'],
                path=local_dir,
                filename=resp['name'],
                md5sum=resp['md5Checksum']))
    drive_dl.wait_for_queue(queue)


if __name__ == '__main__':
    dl_folder(googledrive.GoogleDrive(), "0B94xod46LwqkZlVnN2I1VVNCemc", "tests/")