def test_system_incremental(self):
    """Index 2016 only, spoof the scan date, then verify that an
    incremental run picks up 2017 onwards and that --rescan indexes
    everything.
    """
    s = ts.SetupDbAndCredentials()
    args = [
        "--start-date",
        TestAccount.start,
        "--end-date",
        TestAccount.end,
        "--skip-albums",
        "--index-only",
    ]
    s.test_setup("test_system_incremental", args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(
        TestAccount.image_count_2016,
        count[0],
        "expected {} items in 2016".format(TestAccount.image_count_2016),
    )
    # force an update to the 'most recently scanned file' record
    # (this is normally only set for complete scans and was tested in
    # test_sys_whole_library)
    db.set_scan_date(Utils.string_to_date("2017-01-01"))
    db.store()
    s = ts.SetupDbAndCredentials()
    args = ["--skip-albums", "--index-only"]
    s.test_setup("test_system_incremental", args=args)
    s.gp.start(s.parsed_args)
    # this should add in everything in 2017 onwards (21 files)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    t = (TestAccount.image_count_2016 + TestAccount.item_count_2017 +
         TestAccount.item_count_2020)
    self.assertEqual(
        t, count[0],
        "expected file count from 2016 and 2017 to be {}".format(t))
    # the incremental scan should have advanced the recorded scan date
    d_date = db.get_scan_date()
    self.assertEqual(d_date.date(), TestAccount.latest_date)
    s = ts.SetupDbAndCredentials()
    args = ["--skip-albums", "--index-only", "--rescan"]
    s.test_setup("test_system_incremental", args=args)
    s.gp.start(s.parsed_args)
    # this should add in everything
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    t = TestAccount.image_count + TestAccount.video_count
    self.assertEqual(
        t, count[0],
        "expected a total of {} items after full sync".format(t))
def test_do_delete(self):
    """--do-delete removes local files whose SyncFiles rows are gone:
    download 2017 photos, wipe the DB rows, then re-run with
    --skip-index and confirm the files are deleted from disk.
    """
    s = ts.SetupDbAndCredentials()
    args = [
        '--start-date', '2017-01-01',
        '--end-date', '2018-01-01',
        '--skip-video', '--skip-albums',
        '--do-delete'
    ]
    s.test_setup('test_do_delete', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    pat = str(photos_root / '2017' / '??' / '*.[JjpP]*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(10, len(files))
    db = LocalData(s.root)
    # deliberately delete every row so the next pass sees orphans
    # noinspection SqlWithoutWhere
    db.cur.execute("DELETE FROM SyncFiles;")
    db.store()
    args.append('--skip-index')
    s = ts.SetupDbAndCredentials()
    s.test_setup('test_do_delete', args=args)
    s.gp.start(s.parsed_args)
    # should have removed all files
    files = sorted(s.root.glob(pat))
    self.assertEqual(0, len(files))
def test_shared_albums(self):
    """Verify shared-album handling: AlbumFiles count with and without
    --skip-shared-albums (56 including shared, 50 excluding).
    """
    s = ts.SetupDbAndCredentials()
    args = ['--skip-files']
    s.test_setup('test_shared_albums', args=args,
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    with LocalData(s.root) as db:
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        count = db.cur.fetchone()
        self.assertEqual(56, count[0])
    s = ts.SetupDbAndCredentials()
    args = ['--skip-files', '--skip-shared-albums']
    s.test_setup('test_shared_albums', args=args,
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    with LocalData(s.root) as db:
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        count = db.cur.fetchone()
        self.assertEqual(50, count[0])
def test_system_retry_download(self): s = ts.SetupDbAndCredentials() # note we do index albums because there was a bug on retrying # downloads with albums enabled args = ['--start-date', '2017-01-01', '--end-date', '2018-01-01', '--skip-video'] s.test_setup('test_system_retry_download', args=args, trash_db=True, trash_files=True) s.gp.start(s.parsed_args) pat = str(photos_root / '2017' / '??' / '*.[JjpP]*') files = sorted(s.root.glob(pat)) self.assertEqual(15, len(files)) files[0].unlink() files = sorted(s.root.glob(pat)) self.assertEqual(14, len(files)) # re-run should not download since file is marked as downloaded s = ts.SetupDbAndCredentials() s.test_setup('test_system_retry_download', args=args) s.gp.start(s.parsed_args) files = sorted(s.root.glob(pat)) self.assertEqual(14, len(files)) # but adding --retry-download should get us back to 10 files args.append('--retry-download') s = ts.SetupDbAndCredentials() s.test_setup('test_system_retry_download', args=args) s.gp.start(s.parsed_args) files = sorted(s.root.glob(pat)) self.assertEqual(15, len(files))
def test_system_skip_video(self):
    """--skip-video halves the 2017 index from 20 media items to 10."""
    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2017-01-01',
            '--end-date', '2018-01-01',
            '--skip-albums', '--index-only']
    s.test_setup('test_system_skip_video', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # Total of 20 media items
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(20, count[0])
    db.store()
    del db
    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2017-01-01',
            '--end-date', '2018-01-01',
            '--skip-albums', '--index-only',
            '--skip-video']
    s.test_setup('test_system_skip_video', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # Total of 10 media items
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(10, count[0])
def test_sys_album_add_file(self):
    """tests that the album links get re-created in a new folder with a
    new last-date prefix when a recent photo is added to an album, also
    that the old folder is removed
    """
    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2017-09-19', '--end-date', '2017-09-20']
    s.test_setup('test_sys_album_add_file', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    pat = str(albums_root / '2017' / '0923 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(4, len(files))
    # spoof the album to pretend it only got 3 files up to 2017-09-20
    db = LocalData(s.root)
    db.cur.execute("UPDATE Albums SET EndDate='2017-09-20',"
                   "Size=3 WHERE "
                   "AlbumName='Clones'")
    db.store()
    args = ['--start-date', '2017-09-19', '--end-date', '2017-09-23',
            '--index-only']
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    # the rescan will reset the date so set it back
    db = LocalData(s.root)
    db.cur.execute("UPDATE Albums SET EndDate='2017-09-20' "
                   "WHERE AlbumName='Clones'")
    db.store()
    args = ['--skip-index', '--skip-files']
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    # links must now live under the spoofed 0920 prefix
    pat = str(albums_root / '2017' / '0920 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(4, len(files))
    should_be_gone = s.root / albums_root / '2017' / '0923 Clones'
    self.assertFalse(should_be_gone.exists())
    # test --album-date-by-first-photo
    # force re-download of the album
    db.cur.execute("UPDATE Albums SET Downloaded=0 "
                   "WHERE AlbumName='Clones'")
    db.store()
    args = ['--skip-index', '--skip-files', '--album-date-by-first-photo']
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    # with date-by-first-photo the prefix comes from the earliest item
    pat = str(albums_root / '2017' / '0919 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(4, len(files))
    should_be_gone = s.root / albums_root.absolute() \
        / '2017' / '0920 Clones'
    self.assertFalse(should_be_gone.exists())
def test_bad_ids(self, do_download_file):
    """Failing downloads are recorded in a Bad IDs file and are skipped
    (no further download attempts) on the next run.
    """
    # every download attempt will fail with a server error
    do_download_file.side_effect = HTTPError(Mock(status=500), 'ouch!')
    s = ts.SetupDbAndCredentials()
    args = [
        '--start-date', TestAccount.start,
        '--end-date', TestAccount.end,
        '--skip-albums'
    ]
    s.test_setup('test_bad_ids', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    # check we tried to download once per 2016 image
    self.assertEqual(
        do_download_file.call_count, TestAccount.image_count_2016,
        "Expected {} downloads".format(TestAccount.image_count_2016))
    # this should have created a Bad IDs file
    bad_ids = BadIds(s.root)
    self.assertEqual(
        len(bad_ids.items), TestAccount.image_count_2016,
        "Expected {} Bad IDs entries".format(TestAccount.image_count_2016))
    do_download_file.reset_mock()
    s.test_setup('test_bad_ids', args=args)
    s.gp.start(s.parsed_args)
    # this should have skipped the bad ids and not tried to download
    self.assertEqual(do_download_file.call_count, 0,
                     "Expected 0 calls to do_download")
def test_zero_items_in_response(self, page_size): """ for issue https://github.com/gilesknap/gphotos-sync/issues/112 """ # note this fails with page size below 5 and that might be another API # bug # to emulate issue #112 remove the date range and set page_size = 2 # this then does download everything via media_items.list but sometimes # gets zero items with a next_page token (too expensive on quota to # always leave it like this.) page_size.return_value = 6 s = ts.SetupDbAndCredentials() args = ['--skip-albums', '--index-only', '--start-date', '1965-01-01', '--end-date', '1965-12-31'] s.test_setup('test_zero_items_in_response', trash_files=True, trash_db=True, args=args) s.gp.start(s.parsed_args) db = LocalData(s.root) db.cur.execute("SELECT COUNT() FROM SyncFiles") count = db.cur.fetchone() self.assertEqual( 10, count[0], "expected 10 images 1965" )
def test_sys_favourites_and_dates(self):
    """Download favourite images in test library.
    Also Check that dates are set correctly
    """
    s = ts.SetupDbAndCredentials()
    args = [
        '--favourites-only', '--max-retries', '6', '--max-threads', '2'
    ]
    s.test_setup('test_sys_favourites', args=args,
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # Total of 1 out of media items
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(1, count[0])
    # modify time should have been set to the photo's taken date
    name = s.root / 'photos/2017/09/IMG_2117.JPG'
    date = datetime.fromtimestamp(os.path.getmtime(str(name)))
    expected = datetime(2017, 9, 26, 15, 29, 44)
    self.assertEqual(expected, date.replace(microsecond=0),
                     "Modify date not set correctly")
    # create time is only settable on Windows
    if os.name == 'nt':
        date = datetime.fromtimestamp(os.path.getctime(name))
        expected = datetime(2017, 9, 26, 15, 29, 44)
        self.assertEqual(expected, date.replace(microsecond=0),
                         "Create date not set correctly")
def test_no_album_index(self):
    """for issue #89 - photos directly uploaded into albums dont 'list'"""
    s = ts.SetupDbAndCredentials()
    args = ['--no-album-index', '--skip-shared-albums', '--index-only']
    s.test_setup('test_no_album_index', trash_files=True, trash_db=True,
                 args=args)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # There are 95 items but 10 were uploaded direct into a folder
    # so --no-album-index may affect them (but does not)
    # Also 5 are shared from another account (skipped due to
    # --skip-shared-albums AND --no-album-index)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    # this was an attempt to prove that creating a folder and uploading
    # directly to it in google photos web would reproduce
    # https://github.com/gilesknap/gphotos-sync/issues/89
    # if it had done so then we would only get 80 files
    t = TestAccount.image_count + TestAccount.video_count
    self.assertEqual(
        t, count[0],
        "expected {} files with album index off".format(t)
    )
def test_shared_albums(self):
    """Verify AlbumFiles counts with and without --skip-shared-albums,
    derived from the TestAccount album/shared-album image counts.
    """
    s = ts.SetupDbAndCredentials()
    args = ["--skip-files"]
    s.test_setup("test_shared_albums", args=args,
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    t = (TestAccount.album_image_count +
         TestAccount.album_shared_image_count +
         TestAccount.shared_album_image_count +
         TestAccount.shared_album_shared_image_count)
    with LocalData(s.root) as db:
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        count = db.cur.fetchone()
        self.assertEqual(
            t,
            count[0],
            "expected {} files in all albums including shared".format(t),
        )
    s = ts.SetupDbAndCredentials()
    args = ["--skip-files", "--skip-shared-albums"]
    s.test_setup("test_shared_albums", args=args,
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    # note that unless we use --no-album-index the shared files in the
    # visible album will show up here
    t = (TestAccount.album_image_count +
         TestAccount.album_shared_image_count)  # see above
    with LocalData(s.root) as db:
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        count = db.cur.fetchone()
        self.assertEqual(
            t,
            count[0],
            "expected {} files in all albums excluding shared".format(t),
        )
def test_system_hard_link(self):
    """--use-hardlinks creates hard links for album entries; re-running
    without it (plus --flush-index) replaces them with symlinks.
    """
    s = ts.SetupDbAndCredentials()
    args = [
        "--start-date",
        "2016-01-01",
        "--end-date",
        "2017-01-01",
        "--use-hardlinks",
        "--album",
        "Clones😀",
    ]
    s.test_setup("test_system_hard_link", args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    with LocalData(s.root) as db:
        # Total of 4 images
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        count = db.cur.fetchone()
        self.assertEqual(4, count[0])
    pat = str(albums_root / "*" / "*Clones😀" / "*")
    links: List[Path] = sorted(s.root.glob(pat))
    self.assertEqual(4, len(links))
    for link in links:
        # hard links are regular directory entries, not symlinks
        self.assertTrue(not link.is_symlink())
    # verify that switching to soft links in the same folder
    # overwrites all hard links
    args = [
        "--start-date",
        "2016-01-01",
        "--end-date",
        "2017-01-01",
        "--album",
        "Clones😀",
        "--flush-index",
    ]
    s.test_setup("test_system_hard_link", args=args, trash_db=False,
                 trash_files=False)
    s.gp.start(s.parsed_args)
    with LocalData(s.root) as db:
        # Total of 4 images
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        count = db.cur.fetchone()
        self.assertEqual(4, count[0])
    pat = str(albums_root / "*" / "*Clones😀" / "*")
    links = sorted(s.root.glob(pat))
    self.assertEqual(4, len(links))
    for link in links:
        self.assertTrue(link.is_symlink())
def test_system_incremental(self):
    """Index 2016, spoof the scan date, then verify an incremental run
    adds 2017 items and --rescan indexes the whole library.
    """
    s = ts.SetupDbAndCredentials()
    args = [
        '--start-date', '2016-01-01',
        '--end-date', '2017-01-01',
        '--skip-albums', '--index-only'
    ]
    s.test_setup('test_system_incremental', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(10, count[0])
    # force an update the 'most recently scanned file' record
    # (this is normally only set for complete scans and was tested in
    # test_sys_whole_library)
    db.set_scan_date(Utils.string_to_date("2017-01-01"))
    db.store()
    s = ts.SetupDbAndCredentials()
    args = ['--skip-albums', '--index-only']
    s.test_setup('test_system_incremental', args=args)
    s.gp.start(s.parsed_args)
    # this should add in everything in 2017 (20 files)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(30, count[0])
    d_date = db.get_scan_date()
    # NOTE(review): datetime.date(2017, 9, 26) assumes the file does
    # `import datetime` (module), not `from datetime import datetime`
    # — confirm against this file's imports
    self.assertEqual(d_date.date(), datetime.date(2017, 9, 26))
    s = ts.SetupDbAndCredentials()
    args = ['--skip-albums', '--index-only', '--rescan']
    s.test_setup('test_system_incremental', args=args)
    s.gp.start(s.parsed_args)
    # this should add in everything
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(80, count[0])
def test_base_media(self): b = BaseMedia() with pytest.raises(NotImplementedError): x = b.size with pytest.raises(NotImplementedError): x = b.id with pytest.raises(NotImplementedError): x = b.description with pytest.raises(NotImplementedError): x = b.orig_name with pytest.raises(NotImplementedError): x = b.create_date with pytest.raises(NotImplementedError): x = b.modify_date with pytest.raises(NotImplementedError): x = b.mime_type with pytest.raises(NotImplementedError): x = b.url with patch('gphotos.BaseMedia.os_name', new_callable=PropertyMock(return_value='nt')): assert b.validate_encoding('hello.txt') == 'hello.txt' """Download archived images in test library using flat folders (and windows file name restrictions) """ s = ts.SetupDbAndCredentials() args = [ '--archived', '--skip-albums', '--start-date', '2019-10-01', '--use-flat-path' ] s.test_setup('test_base_media', args=args, trash_files=True, trash_db=True) s.gp.start(s.parsed_args) db = LocalData(s.root) # Total of 1 out of media items db.cur.execute("SELECT COUNT() FROM SyncFiles") count = db.cur.fetchone() self.assertEqual(1, count[0]) pat = str(photos_root / '2019-11' / '*.*') files = sorted(s.root.glob(pat)) self.assertEqual(1, len(files))
def test_base_media(self):
    """Download archived images in test library using flat folders
    (and windows file name restrictions)
    """
    b = BaseMedia()
    # each abstract property must raise NotImplementedError; the
    # assignment target is never bound because the access raises first.
    # (the previous ``print(x)  # for pylint`` after these checks would
    # itself raise NameError for exactly that reason, so it is removed)
    with pytest.raises(NotImplementedError):
        _ = b.size
    with pytest.raises(NotImplementedError):
        _ = b.id
    with pytest.raises(NotImplementedError):
        _ = b.description
    with pytest.raises(NotImplementedError):
        _ = b.orig_name
    with pytest.raises(NotImplementedError):
        _ = b.create_date
    with pytest.raises(NotImplementedError):
        _ = b.modify_date
    with pytest.raises(NotImplementedError):
        _ = b.mime_type
    with pytest.raises(NotImplementedError):
        _ = b.url
    s = ts.SetupDbAndCredentials()
    args = [
        "--skip-albums",
        "--start-date",
        "2020-01-01",
        "--use-flat-path",
    ]
    s.test_setup("test_base_media", args=args, trash_files=True,
                 trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # Total of 1 out of media items
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(1, count[0])
    # --use-flat-path puts files in 'YYYY-MM' folders instead of 'YYYY/MM'
    pat = str(photos_root / "2020-04" / "*.*")
    files = sorted(s.root.glob(pat))
    self.assertEqual(1, len(files))
def test_skip_video_on_album(self):
    """verify that skip video works when syncing a specific folder"""
    setup = ts.SetupDbAndCredentials()
    cli_args = ["--skip-video", "--album", "Movies"]
    setup.test_setup(
        "test_skip_video_on_album",
        args=cli_args,
        trash_files=True,
        trash_db=True,
    )
    setup.gp.start(setup.parsed_args)
    # the Movies album contains only videos, so skipping video must
    # leave the album empty in the DB
    with LocalData(setup.root) as local_db:
        local_db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        row = local_db.cur.fetchone()
        self.assertEqual(0, row[0],
                         "expected 0 video files in album Movies")
# NOTE: leading underscores disable this test (unittest only collects
# names starting with 'test')
def ___test_folder_not_writeable(self):
    """Check that a read-only root raises PermissionError rather than
    'database is locked'.
    """
    # make sure we get permissions error and not 'database is locked'
    s = ts.SetupDbAndCredentials()
    s.test_setup("test_folder_not_writeable", trash_files=True,
                 trash_db=True)
    try:
        if os.name == "nt":
            os.chmod(str(s.root), stat.S_IREAD)
        else:
            s.root.chmod(0o444)
        with self.assertRaises(PermissionError):
            s.gp.main([str(s.root), "--skip-shared-albums"])
    finally:
        # always restore permissions so the tree can be removed
        if os.name == "nt":
            os.chmod(str(s.root), stat.S_IWRITE | stat.S_IREAD)
        else:
            os.chmod(str(s.root), 0o777)
        shutil.rmtree(str(s.root))
def test_sys_favourites(self):
    """Download favourite images in test library. """
    setup = ts.SetupDbAndCredentials()
    cli_args = ['--favourites-only', '--skip-albums']
    setup.test_setup('test_sys_favourites', args=cli_args,
                     trash_files=True, trash_db=True)
    setup.gp.start(setup.parsed_args)
    # exactly one media item in the library is marked as a favourite
    local_db = LocalData(setup.root)
    local_db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = local_db.cur.fetchone()
    self.assertEqual(1, row[0])
def test_max_retries_hit(self):
    """Check DB and filesystem state after a sync in which downloads hit
    the retry limit.
    """
    # NOTE(review): presumably the download layer is mocked/patched by
    # the test fixture so most downloads fail — confirm against the
    # class decorators, which are outside this view
    s = ts.SetupDbAndCredentials()
    args = ["--skip-albums"]
    s.test_setup("test_max_retries_hit", args=args, trash_files=True,
                 trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # all 90 items are still indexed in the DB
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(90, count[0])
    # but only 9 files made it to disk
    pat = str(photos_root / "*" / "*" / "*")
    self.assertEqual(
        9, len(sorted(s.root.glob(pat))), "mismatch on image file count"
    )
def test_sys_album_add_file(self):
    """tests that the album links get re-created in a new folder with a
    new last-date prefix when a recent photo is added to an album, also
    that the old folder is removed
    """
    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2017-09-19', '--end-date', '2017-09-20']
    s.test_setup('test_sys_album_add_file', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    # the date will be picked from the album contents which still includes
    # the file that is not yet downloaded
    pat = str(albums_root / '2017' / '0923 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(3, len(files))
    # spoof the album to pretend it only got 3 files up to 2017-09-20
    db = LocalData(s.root)
    db.cur.execute("UPDATE Albums SET EndDate='2017-09-20',"
                   "Size=3 WHERE "
                   "AlbumName='Clones'")
    db.store()
    args = [
        '--start-date', '2017-09-19',
        '--end-date', '2017-09-23',
        '--index-only'
    ]
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    # the rescan will reset the date so set it back
    db = LocalData(s.root)
    db.cur.execute("UPDATE Albums SET EndDate='2017-09-20' "
                   "WHERE AlbumName='Clones'")
    db.store()
    args = ['--skip-index', '--skip-files']
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    # links must now be under the spoofed 0920 prefix and the old
    # folder removed
    pat = str(albums_root / '2017' / '0920 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(4, len(files))
    self.assertFalse((albums_root / '2017' / '0923 Clones').is_dir())
def test_sys_archived(self):
    """Download archived images in test library. """
    setup = ts.SetupDbAndCredentials()
    cli_args = ["--archived", "--skip-albums", "--start-date", "2019-10-01"]
    setup.test_setup(
        "test_sys_archived", args=cli_args, trash_files=True, trash_db=True
    )
    setup.gp.start(setup.parsed_args)
    # only a single archived item falls in this date range
    local_db = LocalData(setup.root)
    local_db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = local_db.cur.fetchone()
    self.assertEqual(1, row[0])
def download_faves(self, expected=4, no_response=False, trash=True):
    """Download the 'Clones' album and assert the expected SyncFiles
    count (helper for the google-albums-sync tests).
    """
    setup = ts.SetupDbAndCredentials()
    cli_args = [
        '--album', 'Clones',
        '--use-flat-path',
        '--omit-album-date',
        '--rescan'
    ]
    setup.test_setup('test_google_albums_sync', args=cli_args,
                     trash_files=trash, trash_db=trash)
    setup.gp.start(setup.parsed_args)
    local_db = LocalData(setup.root)
    local_db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = local_db.cur.fetchone()
    self.assertEqual(expected, row[0])
def test_fs_overrides(self):
    """--ntfs and --max-filename override the filesystem checks; without
    them (on non-Windows) the detected defaults apply.
    """
    s = ts.SetupDbAndCredentials()
    args = ["--ntfs", "--max-filename", "30"]
    s.test_setup("test_fs_overrides", args=args, trash_db=True,
                 trash_files=True)
    s.gp.fs_checks(s.root, s.parsed_args)
    self.assertFalse(get_check().is_linux)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual
    self.assertEqual(get_check().max_filename, 30)
    if os_name != "nt":
        args = []
        s.test_setup("test_fs_overrides", args=args, trash_db=True,
                     trash_files=True)
        s.gp.fs_checks(s.root, s.parsed_args)
        self.assertTrue(get_check().is_linux)
        self.assertEqual(get_check().max_filename, 255)
def test_system_date_range(self):
    """Index only items inside a --start-date/--end-date window."""
    setup = ts.SetupDbAndCredentials()
    cli_args = [
        "--start-date",
        "2016-01-01",
        "--end-date",
        "2017-01-01",
        "--skip-albums",
        "--index-only",
    ]
    setup.test_setup(
        "test_system_date_range",
        args=cli_args,
        trash_db=True,
        trash_files=True,
    )
    setup.gp.start(setup.parsed_args)
    # 2016 contains exactly 10 images in the test library
    local_db = LocalData(setup.root)
    local_db.cur.execute("SELECT COUNT() FROM SyncFiles")
    row = local_db.cur.fetchone()
    self.assertEqual(10, row[0])
def test_bad_ids(self, do_download_file):
    """Failing downloads are recorded in a Bad IDs file and skipped on
    the next run.
    """
    # every download attempt fails with a server error
    do_download_file.side_effect = HTTPError(Mock(status=500), 'ouch!')
    s = ts.SetupDbAndCredentials()
    args = [
        '--start-date', '2016-01-01',
        '--end-date', '2017-01-01',
        '--skip-albums'
    ]
    s.test_setup('test_bad_ids', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    # check we tried to download 10 times
    self.assertEqual(do_download_file.call_count, 10)
    # this should have created a Bad IDs file
    bad_ids = BadIds(s.root)
    self.assertEqual(len(bad_ids.items), 10)
    s.test_setup('test_bad_ids', args=args)
    s.gp.start(s.parsed_args)
    # this should have skipped the bad ids and not tried to download
    # (the mock is not reset, so the cumulative count is still 10)
    self.assertEqual(do_download_file.call_count, 10)
def test_sys_whole_library(self):
    """Download all images in test library. Check filesystem for correct
    files
    Check DB for correct entries

    Note, if you select --skip-video then we use the search API instead
    of list
    This then misses these 2 files:
        subaru1.jpg|photos/1998/10
        subaru2.jpg|photos/1998/10
    todo investigate above
    """
    s = ts.SetupDbAndCredentials()
    s.test_setup('test_sys_whole_library', trash_files=True,
                 trash_db=True)
    s.gp.main([str(s.root), '--skip-shared-albums'])
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    t = TestAccount.image_count + \
        TestAccount.video_count + \
        TestAccount.shared_image_count
    self.assertEqual(t, count[0],
                     "expected {} items excluding shared albums".format(t))
    db.cur.execute(
        "SELECT COUNT() FROM SyncFiles where MimeType like 'video%'")
    count = db.cur.fetchone()
    self.assertEqual(TestAccount.video_count, count[0])
    db.cur.execute("SELECT COUNT() FROM Albums;")
    count = db.cur.fetchone()
    t = TestAccount.album_count
    self.assertEqual(t, count[0],
                     'expected {} total album count'.format(t))
    # per-year file counts on disk must match the account's manifest
    for year, images, shared, videos in zip(
            TestAccount.image_years,
            TestAccount.images_per_year,
            TestAccount.shared_images_per_year,
            TestAccount.videos_per_year):
        # looking for .jpg .JPG .png .jfif
        pat = str(photos_root / str(year) / '*' / '*.[JjpP]*')
        self.assertEqual(
            images + shared, len(sorted(s.root.glob(pat))),
            "mismatch on image file count for year {}".format(year))
        # looking for *.mp4
        pat = str(photos_root / str(year) / '*' / '*.mp4')
        self.assertEqual(
            videos, len(sorted(s.root.glob(pat))),
            "mismatch on video file count for year {}".format(year))
    # every album folder must hold its expected number of links
    for idx, a in enumerate(TestAccount.album_names):
        pat = str(albums_root / '*' / a / '*')
        t = TestAccount.album_images[idx] + \
            TestAccount.album_shared_images[idx]
        self.assertEqual(
            t, len(sorted(s.root.glob(pat))),
            "album {} does not contain {} images".format(
                a, TestAccount.album_images[idx]))
    # check that the most recent scanned file date was recorded
    d_date = db.get_scan_date()
    self.assertEqual(d_date.date(), TestAccount.latest_date)
    # check that re-running does not get any db constraint violations etc.
    # also test the comparison feature, by comparing the library with its
    # own gphotos-sync output
    s.test_setup('test_sys_whole_library',
                 args=['--compare-folder', str(s.root)])
    s.gp.start(s.parsed_args)
    # There is one pair of files that are copies of the same image with
    # same UID. This looks like one pair of duplicates and one extra file
    # in the comparison folder. (also the gphotos database etc appear
    # as missing files)
    pat = str(comparison_root / 'missing_files' / '*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(0, len(files), "expected 0 missing files")
    pat = str(comparison_root / 'extra_files' / '*' / '*' / '*' / '*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(0, len(files), "expected 0 extra files")
    pat = str(comparison_root / 'duplicates' / '*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(0, len(files), "expected 0 duplicate files")
def test_sys_whole_library(self):
    """Download all images in test library. Check filesystem for correct
    files
    Check DB for correct entries

    Note, if you select --skip-video then we use the search API instead
    of list
    This then misses these 3 files:
        subaru1.jpg|photos/1998/10
        subaru2.jpg|photos/1998/10
        DSCF0030.JPG|photos/2000/02
    todo investigate above
    """
    s = ts.SetupDbAndCredentials()
    s.test_setup('test_sys_whole_library', trash_files=True,
                 trash_db=True)
    s.gp.main([str(s.root)])
    db = LocalData(s.root)
    # Total of 85 media items
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    count = db.cur.fetchone()
    self.assertEqual(85, count[0])
    # with 10 videos
    db.cur.execute(
        "SELECT COUNT() FROM SyncFiles where MimeType like 'video%'")
    count = db.cur.fetchone()
    self.assertEqual(10, count[0])
    # and 5 albums
    db.cur.execute("SELECT COUNT() FROM Albums;")
    count = db.cur.fetchone()
    self.assertEqual(5, count[0])
    # downloaded the expected number of images in each of the years in
    # the test data
    image_years = [2017, 2016, 2015, 2001, 2000, 1998, 1965]
    image_count = [15, 10, 10, 10, 10, 10, 10]
    for year, count in zip(image_years, image_count):
        # looking for .jpg .JPG .png .jfif
        pat = str(photos_root / str(year) / '*' / '*.[JjpP]*')
        self.assertEqual(count, len(sorted(s.root.glob(pat))))
    # and 10 mp4 for 2017
    pat = str(photos_root / '2017' / '*' / '*.mp4')
    files = sorted(s.root.glob(pat))
    self.assertEqual(10, len(files))
    # 4 albums the following item counts
    album_items = [10, 10, 4, 16]
    albums = [
        r'0101?Album?2001', r'0528?Movies', r'0923?Clones',
        r'0926?Album?2016'
    ]
    for idx, a in enumerate(albums):
        pat = str(albums_root / '*' / a / '*')
        print('looking for album items at {}'.format(pat))
        self.assertEqual(album_items[idx], len(sorted(s.root.glob(pat))))
    # check that the most recent scanned file date was recorded
    d_date = db.get_scan_date()
    self.assertEqual(d_date.date(), datetime.date(2017, 9, 26))
    # check that re-running does not get any db constraint violations etc.
    # also test the comparison feature, by comparing the library with its
    # own gphotos-sync output
    s.test_setup('test_sys_whole_library',
                 args=['--compare-folder', str(s.root)])
    s.gp.start(s.parsed_args)
    # There is one pair of files that are copies of the same image with
    # same UID. This looks like one pair of duplicates and one extra file
    # in the comparison folder. (also the gphotos database etc appear
    # as missing files)
    pat = str(comparison_root / 'missing_files' / '*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(0, len(files))
    pat = str(comparison_root / 'extra_files' / '*' / '*' / '*' / '*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(0, len(files))
    pat = str(comparison_root / 'duplicates' / '*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(0, len(files))