def do_download_file(self, base_url: str, media_item: DatabaseMedia) -> None:
    """Runs in a process pool and does a download of a single media item.

    The item is streamed into a temporary file inside the destination
    folder and then renamed into place, so an interrupted download never
    leaves a partial file under the final name.
    """
    if self.case_insensitive_fs:
        # Lower-case the stored path components so they match what was
        # written on a case-insensitive filesystem.
        relative_folder = str(media_item.relative_folder).lower()
        filename = str(media_item.filename).lower()
    else:
        relative_folder = media_item.relative_folder
        filename = media_item.filename
    local_folder = self._root_folder / relative_folder
    local_full_path = local_folder / filename
    # Google Photos baseUrl suffixes: '=dv' requests the video stream,
    # '=d' the original image bytes. Videos get a longer timeout.
    if media_item.is_video():
        download_url = "{}=dv".format(base_url)
        timeout = self.video_timeout
    else:
        download_url = "{}=d".format(base_url)
        timeout = self.image_timeout
    # Temp file lives in the destination folder so the final rename stays
    # on the same filesystem (and is therefore an atomic replace on POSIX).
    temp_file = tempfile.NamedTemporaryFile(dir=local_folder, delete=False)
    t_path = Path(temp_file.name)
    try:
        response = self._session.get(download_url, stream=True, timeout=timeout)
        response.raise_for_status()
        # Stream the body straight to disk without buffering it in memory.
        shutil.copyfileobj(response.raw, temp_file)
        temp_file.close()
        # Sentinel: tells the finally block the handle is already closed
        # and the rename has (or is about to) put the file in place.
        temp_file = None
        response.close()
        t_path.rename(local_full_path)
        create_date = Utils.safe_timestamp(media_item.create_date)
        # os.utime takes (atime, mtime): access time from modify_date,
        # modification time from create_date — presumably so files sort by
        # capture date in file managers; TODO confirm this is intentional.
        os.utime(
            str(local_full_path),
            (
                Utils.safe_timestamp(media_item.modify_date).timestamp(),
                create_date.timestamp(),
            ),
        )
        if _use_win_32:
            # On Windows also set the creation timestamp, which os.utime
            # cannot touch. SetFileTime takes (created, accessed, written);
            # NOTE(review): all three are set to create_date here, which
            # overwrites the write time set by os.utime above — confirm.
            file_handle = win32file.CreateFile(
                str(local_full_path),
                win32file.GENERIC_WRITE,
                0,
                None,
                win32con.OPEN_EXISTING,
                0,
                None,
            )
            win32file.SetFileTime(file_handle, *(create_date, ) * 3)
            file_handle.close()
        # Standard permissions, honouring the umask captured at startup.
        os.chmod(str(local_full_path), 0o666 & ~self.current_umask)
    except KeyboardInterrupt:
        log.debug("User cancelled download thread")
        raise
    finally:
        # On any failure path: close the still-open temp handle and remove
        # the partial file so it is never mistaken for a completed download.
        if temp_file:
            temp_file.close()
        if t_path.exists():
            t_path.unlink()
def do_download_file(self, base_url: str, media_item: DatabaseMedia) -> None:
    """Runs in a process pool and does a download of a single media item.

    The item is streamed into a temporary file inside the destination
    folder and then renamed into place, so an interrupted download never
    leaves a partial file under the final name.

    :param base_url: the Google Photos baseUrl for the item
    :param media_item: database row describing where the item belongs
    """
    local_folder = self._root_folder / media_item.relative_folder
    local_full_path = local_folder / media_item.filename
    # Google Photos baseUrl suffixes: '=dv' requests the video stream,
    # '=d' the original image bytes. Videos get a longer timeout.
    if media_item.is_video():
        download_url = '{}=dv'.format(base_url)
        timeout = self.video_timeout
    else:
        download_url = '{}=d'.format(base_url)
        timeout = self.image_timeout
    # Temp file lives in the destination folder so the final rename stays
    # on the same filesystem (and is therefore an atomic replace on POSIX).
    temp_file = tempfile.NamedTemporaryFile(dir=local_folder, delete=False)
    t_path = Path(temp_file.name)
    try:
        response = self._session.get(download_url, stream=True, timeout=timeout)
        response.raise_for_status()
        # Stream the body straight to disk without buffering it in memory.
        shutil.copyfileobj(response.raw, temp_file)
        temp_file.close()
        # Sentinel: tells the finally block the handle is already closed.
        temp_file = None
        response.close()
        t_path.rename(local_full_path)
        # BUG FIX: os.utime requires numeric (atime, mtime) timestamps;
        # Utils.safe_timestamp returns a datetime, so convert with
        # .timestamp() (previously the datetimes were passed directly,
        # which raises TypeError).
        os.utime(
            str(local_full_path),
            (
                Utils.safe_timestamp(media_item.modify_date).timestamp(),
                Utils.safe_timestamp(media_item.create_date).timestamp(),
            ),
        )
    except KeyboardInterrupt:
        log.debug("User cancelled download thread")
        raise
    finally:
        # On any failure path: close the still-open temp handle and remove
        # the partial file so it is never mistaken for a completed download.
        if temp_file:
            temp_file.close()
        if t_path.exists():
            t_path.unlink()
def fs_checks(root_folder: Path, args: dict):
    """Probe the root folder's filesystem and adjust *args* to match.

    Disables album symlinks when the filesystem cannot create them and
    switches on case-insensitive handling when names are not case
    sensitive. Returns the (mutated) args object.
    """
    Utils.minimum_date(root_folder)
    # Run the checks once and stash the results globally for all to inspect.
    do_check(root_folder, int(args.max_filename), bool(args.ntfs))

    # Albums rely on symlinks; skip them entirely when unsupported.
    if not get_check().is_symlink:
        args.skip_albums = True

    # Honour an explicit --case-insensitive-fs flag, otherwise detect it.
    if not args.case_insensitive_fs and not get_check().is_case_sensitive:
        args.case_insensitive_fs = True

    return args
def fs_checks(root_folder: Path, args: dict):
    """Probe the root folder's filesystem and adjust *args* to match.

    Disables album symlinks when the filesystem cannot create them and
    switches on case-insensitive handling when names are not case
    sensitive. Returns the (mutated) args object.
    """
    Utils.minimum_date()
    Checks.get_max_path_length(root_folder)
    Checks.get_max_filename_length(root_folder)

    # Albums rely on symlinks; skip them entirely when unsupported.
    if not Checks.symlinks_supported(root_folder):
        args.skip_albums = True

    # Honour an explicit --case-insensitive-fs flag, otherwise detect it.
    if not args.case_insensitive_fs and not Checks.is_case_sensitive(root_folder):
        args.case_insensitive_fs = True

    return args
def test_system_incremental(self):
    """Index a date range, then verify incremental and rescan behaviour."""
    setup = ts.SetupDbAndCredentials()
    setup.test_setup(
        "test_system_incremental",
        args=[
            "--start-date",
            TestAccount.start,
            "--end-date",
            TestAccount.end,
            "--skip-albums",
            "--index-only",
        ],
        trash_db=True,
        trash_files=True,
    )
    setup.gp.start(setup.parsed_args)

    db = LocalData(setup.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = db.cur.fetchone()
    self.assertEqual(
        TestAccount.image_count_2016,
        row[0],
        "expected {} items in 2016".format(TestAccount.image_count_2016),
    )

    # force an update to the 'most recently scanned file' record
    # (this is normally only set for complete scans and was tested in
    # test_sys_whole_library)
    db.set_scan_date(Utils.string_to_date("2017-01-01"))
    db.store()

    setup = ts.SetupDbAndCredentials()
    setup.test_setup(
        "test_system_incremental", args=["--skip-albums", "--index-only"]
    )
    setup.gp.start(setup.parsed_args)

    # this should add in everything in 2017 onwards (21 files)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = db.cur.fetchone()
    expected = (
        TestAccount.image_count_2016
        + TestAccount.item_count_2017
        + TestAccount.item_count_2020
    )
    self.assertEqual(
        expected,
        row[0],
        "expected file count from 2016 and 2017 to be {}".format(expected),
    )
    d_date = db.get_scan_date()
    self.assertEqual(d_date.date(), TestAccount.latest_date)

    setup = ts.SetupDbAndCredentials()
    setup.test_setup(
        "test_system_incremental",
        args=["--skip-albums", "--index-only", "--rescan"],
    )
    setup.gp.start(setup.parsed_args)

    # this should add in everything
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = db.cur.fetchone()
    expected = TestAccount.image_count + TestAccount.video_count
    self.assertEqual(
        expected,
        row[0],
        "expected a total of {} items after full sync".format(expected),
    )
def from_parm(cls, album_id, filename, size, start, end) -> G:
    """Build a new albums row from individual field values.

    The SyncDate is stamped with the current time and Downloaded starts
    at 0 (not yet downloaded).
    """
    sync_stamp = Utils.date_to_string(datetime.now())
    return cls.make(
        RemoteId=album_id,
        AlbumName=filename,
        Size=size,
        StartDate=start,
        EndDate=end,
        SyncDate=sync_stamp,
        Downloaded=0,
    )
def get_scan_date(self) -> datetime:
    """Return the date of the last index scan, or None if never recorded."""
    self.cur.execute("SELECT LastIndex FROM Globals WHERE Id IS 1")
    row = self.cur.fetchone()
    raw_value = row["LastIndex"]
    # An empty/NULL LastIndex means no scan has completed yet.
    return Utils.string_to_date(raw_value) if raw_value else None
def __init__(self, root_folder: Path, db: LocalData, photos_path="photos"):
    """Initialise indexer state.

    :param root_folder: base folder under which all media is stored
    :param db: local database wrapper; may be falsy (e.g. in tests), in
        which case no resume point is computed
    :param photos_path: subfolder name for photos, relative to root_folder
    """
    self._root_folder: Path = root_folder
    self._db: LocalData = db
    self._media_folder: Path = Path(photos_path)
    # Counters updated as an index run progresses.
    self.files_indexed: int = 0
    self.files_index_skipped: int = 0
    # Date-range filters for indexing; None means unbounded.
    self.start_date: datetime = None
    self.end_date: datetime = None
    if db:
        # Resume from the last recorded scan date, falling back to the
        # project's minimum date when the database has never been scanned.
        self.latest_download = self._db.get_scan_date() or \
            Utils.minimum_date()
    self.extractor = LocationExtract()
def test_system_inc_picasa(self, get_album):
    """Verify incremental album indexing against mocked last-sync dates."""
    setup = SetupDbAndCredentials()
    # mock get album to pretend a full scan has occurred on 2020-08-28
    get_album.return_value = LocalData.AlbumsRow.make(
        SyncDate=Utils.string_to_date('2020-08-28 00:00:00'))
    setup.test_setup(
        'system_inc_picasa',
        args=['--end-date', '2000-01-01', '--skip-drive', '--index-only'],
        trash_files=True,
    )
    setup.gp.start(setup.parsed_args)

    db = LocalData(setup.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = db.cur.fetchone()
    # Nothing newer than the mocked sync date should have been indexed.
    self.assertEqual(row[0], 0)

    # TODO need to add some photos to the test account that make this more
    # meaningful. Currently they all have the same modified date 2017-09-18
    # mock get album to pretend a full scan has occurred on 2017-09-17
    get_album.return_value = LocalData.AlbumsRow.make(
        SyncDate=Utils.string_to_date('2017-09-17 00:00:00'))
    setup.test_setup(
        'system_inc_picasa',
        args=[
            '--skip-drive',
            '--end-date', '2017-09-19',
            '--index-only',
            '--skip-video',
        ],
    )
    setup.gp.start(setup.parsed_args)

    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = db.cur.fetchone()
    # 4 albums with 26 entries, 10 are videos and 6 overlap = 10
    self.assertEqual(row[0], 10)
    db.cur.execute("SELECT COUNT() FROM Albums;")
    row = db.cur.fetchone()
    self.assertEqual(row[0], 4)
def test_system_incremental(self):
    """Index 2016 only, then verify incremental and full-rescan counts."""
    setup = ts.SetupDbAndCredentials()
    setup.test_setup(
        'test_system_incremental',
        args=[
            '--start-date', '2016-01-01',
            '--end-date', '2017-01-01',
            '--skip-albums',
            '--index-only',
        ],
        trash_db=True,
        trash_files=True,
    )
    setup.gp.start(setup.parsed_args)

    db = LocalData(setup.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = db.cur.fetchone()
    self.assertEqual(10, row[0])

    # force an update the 'most recently scanned file' record
    # (this is normally only set for complete scans and was tested in
    # test_sys_whole_library)
    db.set_scan_date(Utils.string_to_date("2017-01-01"))
    db.store()

    setup = ts.SetupDbAndCredentials()
    setup.test_setup('test_system_incremental',
                     args=['--skip-albums', '--index-only'])
    setup.gp.start(setup.parsed_args)

    # this should add in everything in 2017 (20 files)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = db.cur.fetchone()
    self.assertEqual(30, row[0])
    d_date = db.get_scan_date()
    self.assertEqual(d_date.date(), datetime.date(2017, 9, 26))

    setup = ts.SetupDbAndCredentials()
    setup.test_setup('test_system_incremental',
                     args=['--skip-albums', '--index-only', '--rescan'])
    setup.gp.start(setup.parsed_args)

    # this should add in everything
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = db.cur.fetchone()
    self.assertEqual(80, row[0])
def __init__(self, api: RestClient, root_folder: Path, db: LocalData,
             photos_path: Path, use_flat_path: bool = False):
    """Initialise the Google Photos indexer.

    :param api: REST client for the Google Photos API
    :param root_folder: base folder under which all media is stored
    :param db: local database wrapper; may be falsy (e.g. in tests), in
        which case no resume point is computed
    :param photos_path: subfolder for photos, relative to root_folder
    :param use_flat_path: use a flattened date folder layout instead of
        nested year/month folders -- TODO confirm exact layout downstream
    """
    self._api: RestClient = api
    self._root_folder: Path = root_folder
    self._db: LocalData = db
    self._media_folder: Path = Path(photos_path)
    self._use_flat_path = use_flat_path
    # Counters updated as an index run progresses.
    self.files_indexed: int = 0
    self.files_index_skipped: int = 0
    if db:
        # Resume from the last recorded scan date, falling back to the
        # project's minimum date when the database has never been scanned.
        self.latest_download = self._db.get_scan_date() or \
            Utils.minimum_date()
    # attributes to be set after init
    # thus in theory one instance could do multiple indexes
    self.start_date: datetime = None
    self.end_date: datetime = None
    self.include_video: bool = True
    self.rescan: bool = False
    self.favourites = False
def set_scan_date(self, last_date: datetime):
    """Record *last_date* as the time of the most recent index scan."""
    self.cur.execute(
        'UPDATE Globals SET LastIndex=? WHERE Id IS 1',
        (Utils.date_to_string(last_date), ),
    )
def setup(self, args: Namespace, db_path: Path):
    """Wire up all collaborators from the parsed command-line arguments.

    Creates the local database, authorises against the Google Photos API,
    constructs the index/download/album-sync components, and pushes the
    shared date-range and option settings into each of them.

    :param args: parsed command-line arguments
    :param db_path: folder holding the database and the OAuth token file
    """
    root_folder = Path(args.root_folder).absolute()
    photos_folder = Path(args.photos_path)
    albums_folder = Path(args.albums_path)
    compare_folder = None
    if args.compare_folder:
        compare_folder = Path(args.compare_folder).absolute()
    app_dirs = AppDirs(APP_NAME)

    # Local state: sqlite database plus the cached OAuth token.
    self.data_store = LocalData(db_path, args.flush_index)
    credentials_file = db_path / ".gphotos.token"
    secret_file = Path(app_dirs.user_config_dir) / "client_secret.json"
    # --new-token forces a fresh OAuth flow by discarding the cached token.
    if args.new_token and credentials_file.exists():
        credentials_file.unlink()

    # Read-only library access plus sharing scope for shared albums.
    scope = [
        'https://www.googleapis.com/auth/photoslibrary.readonly',
        'https://www.googleapis.com/auth/photoslibrary.sharing',
    ]
    photos_api_url = 'https://photoslibrary.googleapis.com/$discovery' \
                     '/rest?version=v1'

    self.auth = Authorize(scope, credentials_file, secret_file)
    self.auth.authorize()

    # Construct the main worker components, all sharing one data store.
    self.google_photos_client = RestClient(photos_api_url,
                                           self.auth.session)
    self.google_photos_idx = GooglePhotosIndex(self.google_photos_client,
                                               root_folder,
                                               self.data_store,
                                               args.photos_path,
                                               args.use_flat_path)
    self.google_photos_down = GooglePhotosDownload(
        self.google_photos_client, root_folder, self.data_store)
    self.google_albums_sync = GoogleAlbumsSync(
        self.google_photos_client, root_folder, self.data_store,
        # any of these flags means album links must be rebuilt from scratch
        args.flush_index or args.retry_download or args.rescan,
        photos_folder, albums_folder, args.use_flat_path,
        args.use_hardlinks)
    self.location_update = LocationUpdate(root_folder, self.data_store,
                                          args.photos_path)
    if args.compare_folder:
        self.local_files_scan = LocalFilesScan(root_folder, compare_folder,
                                               self.data_store)

    # Propagate the shared date window and per-component options.
    self._start_date = Utils.string_to_date(args.start_date)
    self._end_date = Utils.string_to_date(args.end_date)
    self.google_photos_idx.start_date = self._start_date
    self.google_photos_idx.end_date = self._end_date
    self.google_albums_sync.shared_albums = not args.skip_shared_albums
    self.google_albums_sync.album_index = not args.no_album_index
    self.google_photos_down.start_date = self._start_date
    self.google_photos_down.end_date = self._end_date
    self.location_update.start_date = self._start_date
    self.location_update.end_date = self._end_date
    self.google_photos_idx.include_video = not args.skip_video
    self.google_photos_idx.rescan = args.rescan
    self.google_photos_idx.favourites = args.favourites_only
    self.google_photos_down.retry_download = args.retry_download
    self.google_albums_sync.album = args.album
def setup(self, args: Namespace, db_path: Path):
    """Wire up all collaborators from the parsed command-line arguments.

    Creates the local database, authorises against the Google Photos API,
    bundles all options into a Settings object, and constructs the
    index/download/album-sync components around it.

    :param args: parsed command-line arguments
    :param db_path: folder holding the database and the OAuth token file
    """
    root_folder = Path(args.root_folder).absolute()
    compare_folder = None
    if args.compare_folder:
        compare_folder = Path(args.compare_folder).absolute()
    app_dirs = AppDirs(APP_NAME)

    # Local state: sqlite database plus the cached OAuth token.
    self.data_store = LocalData(db_path, args.flush_index)
    credentials_file = db_path / ".gphotos.token"
    # --secret overrides the default client-secret location.
    if args.secret:
        secret_file = Path(args.secret)
    else:
        secret_file = Path(app_dirs.user_config_dir) / "client_secret.json"
    # --new-token forces a fresh OAuth flow by discarding the cached token.
    if args.new_token and credentials_file.exists():
        credentials_file.unlink()

    # Read-only library access plus sharing scope for shared albums.
    scope = [
        "https://www.googleapis.com/auth/photoslibrary.readonly",
        "https://www.googleapis.com/auth/photoslibrary.sharing",
    ]
    photos_api_url = ("https://photoslibrary.googleapis.com/$discovery"
                      "/rest?version=v1")

    self.auth = Authorize(scope, credentials_file, secret_file,
                          int(args.max_retries))
    self.auth.authorize()

    # Single immutable-ish bundle of options shared by every component.
    settings = Settings(
        start_date=Utils.string_to_date(args.start_date),
        end_date=Utils.string_to_date(args.end_date),
        shared_albums=not args.skip_shared_albums,
        album_index=not args.no_album_index,
        use_start_date=args.album_date_by_first_photo,
        album=args.album,
        album_regex=args.album_regex,
        favourites_only=args.favourites_only,
        retry_download=args.retry_download,
        case_insensitive_fs=args.case_insensitive_fs,
        include_video=not args.skip_video,
        rescan=args.rescan,
        archived=args.archived,
        photos_path=Path(args.photos_path),
        albums_path=Path(args.albums_path),
        use_flat_path=args.use_flat_path,
        max_retries=int(args.max_retries),
        max_threads=int(args.max_threads),
        omit_album_date=args.omit_album_date,
        use_hardlinks=args.use_hardlinks,
        progress=args.progress,
        ntfs_override=args.ntfs)

    # Construct the main worker components, all sharing one data store.
    self.google_photos_client = RestClient(photos_api_url,
                                           self.auth.session)
    self.google_photos_idx = GooglePhotosIndex(self.google_photos_client,
                                               root_folder,
                                               self.data_store, settings)
    self.google_photos_down = GooglePhotosDownload(
        self.google_photos_client, root_folder, self.data_store, settings)
    self.google_albums_sync = GoogleAlbumsSync(
        self.google_photos_client,
        root_folder,
        self.data_store,
        # any of these flags means album links must be rebuilt from scratch
        args.flush_index or args.retry_download or args.rescan,
        settings,
    )
    if args.compare_folder:
        self.local_files_scan = LocalFilesScan(root_folder, compare_folder,
                                               self.data_store)
def modify_date(self):
    """Last-modified date parsed from the album XML's <updated> element."""
    updated_text = self.__album_xml.updated.text
    return Utils.string_to_date(updated_text)
def create_date(self):
    """Creation date parsed from the album XML's <published> element."""
    published_text = self.__album_xml.published.text
    return Utils.string_to_date(published_text)