def test_shared_albums(self):
    """Verify AlbumFiles counts with and without --skip-shared-albums."""
    s = ts.SetupDbAndCredentials()
    s.test_setup('test_shared_albums', args=['--skip-files'],
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    with LocalData(s.root) as db:
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        self.assertEqual(56, db.cur.fetchone()[0])

    # repeat the sync with shared albums excluded
    s = ts.SetupDbAndCredentials()
    s.test_setup('test_shared_albums',
                 args=['--skip-files', '--skip-shared-albums'],
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    with LocalData(s.root) as db:
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        self.assertEqual(50, db.cur.fetchone()[0])
def test_system_incremental(self):
    """Index up to 1970, then extend to 2017-09-19, checking media
    counts and the stored scan date after each pass."""
    s = SetupDbAndCredentials()
    s.test_setup('system_incremental',
                 args=['--end-date', '1970-01-01', '--skip-picasa',
                       '--index-only', '--skip-video'],
                 trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles WHERE MediaType = 0;")
    self.assertEqual(db.cur.fetchone()[0], 0)
    (d_date, _) = db.get_scan_dates()
    self.assertEqual(d_date.date(), datetime.date(1970, 1, 1))

    # second pass extends the index forward to 2017-09-19
    s.test_setup('system_incremental',
                 args=['--end-date', '2017-09-19', '--skip-picasa',
                       '--index-only', '--skip-video'])
    s.gp.start(s.parsed_args)
    db.cur.execute("SELECT COUNT() FROM SyncFiles WHERE MediaType = 0;")
    # NOTE(review): an older comment questioned "60 not 70" but the
    # asserted value is 56 - confirm which count is current
    self.assertEqual(db.cur.fetchone()[0], 56)
    (d_date, _) = db.get_scan_dates()
    self.assertEqual(d_date.date(), datetime.date(2017, 9, 18))
def test_do_delete(self):
    """--do-delete removes local files whose DB rows have gone."""
    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2017-01-01', '--end-date', '2018-01-01',
            '--skip-video', '--skip-albums', '--do-delete']
    s.test_setup('test_do_delete', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    pat = str(photos_root / '2017' / '??' / '*.[JjpP]*')
    self.assertEqual(10, len(sorted(s.root.glob(pat))))

    # wipe the index so every local file looks like it was removed
    db = LocalData(s.root)
    # noinspection SqlWithoutWhere
    db.cur.execute("DELETE FROM SyncFiles;")
    db.store()

    args.append('--skip-index')
    s = ts.SetupDbAndCredentials()
    s.test_setup('test_do_delete', args=args)
    s.gp.start(s.parsed_args)
    # should have removed all files
    self.assertEqual(0, len(sorted(s.root.glob(pat))))
def test_drive_delete(self):
    """check_for_removed deletes drive files missing from the DB."""
    s = SetupDbAndCredentials()
    args = ['--start-date', '2017-01-01', '--end-date', '2017-09-19',
            '--skip-picasa', '--do-delete', '--skip-video']
    s.test_setup('test_drive_delete', args=args, trash_files=True)
    pat = os.path.join(s.root, 'drive', 'Google Photos', '2017', '*.*')
    print(pat)
    s.gp.start(s.parsed_args)
    self.assertEqual(10, len(glob.glob(pat)))

    # with an intact index nothing should be removed
    s.test_setup('test_drive_delete', args=args)
    s.gp.drive_sync.check_for_removed()
    self.assertEqual(10, len(glob.glob(pat)))

    # drop the matching rows; the local files should then be deleted
    db = LocalData(s.root)
    db.cur.execute("DELETE FROM SyncFiles WHERE MediaType = 0 "
                   "AND Filename LIKE '%201701%';")
    db.store()
    s.gp.drive_sync.check_for_removed()
    self.assertEqual(0, len(glob.glob(pat)))
def test_sys_album_add_file(self):
    """tests that the album links get re-created in a new folder with a
    new last-date prefix when a recent photo is added to an album,
    also that the old folder is removed
    """
    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2017-09-19', '--end-date', '2017-09-20']
    s.test_setup('test_sys_album_add_file', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    pat = str(albums_root / '2017' / '0923 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(4, len(files))

    # spoof the album to pretend it only got 3 files up to 2017-09-20
    db = LocalData(s.root)
    db.cur.execute("UPDATE Albums SET EndDate='2017-09-20',"
                   "Size=3 WHERE "
                   "AlbumName='Clones'")
    db.store()

    args = ['--start-date', '2017-09-19', '--end-date', '2017-09-23',
            '--index-only']
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    # the rescan will reset the date so set it back
    db = LocalData(s.root)
    db.cur.execute("UPDATE Albums SET EndDate='2017-09-20' "
                   "WHERE AlbumName='Clones'")
    db.store()

    args = ['--skip-index', '--skip-files']
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    pat = str(albums_root / '2017' / '0920 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(4, len(files))
    should_be_gone = s.root / albums_root / '2017' / '0923 Clones'
    self.assertFalse(should_be_gone.exists())

    # test --album-date-by-first-photo
    # force re-download of the album
    db.cur.execute("UPDATE Albums SET Downloaded=0 "
                   "WHERE AlbumName='Clones'")
    db.store()
    args = ['--skip-index', '--skip-files', '--album-date-by-first-photo']
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    pat = str(albums_root / '2017' / '0919 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(4, len(files))
    # bug fix: 's.root / albums_root.absolute()' discarded s.root
    # entirely (joining a Path with an absolute path keeps only the
    # absolute part), so the old-folder check ran against the CWD.
    # Check under the sync root, consistent with the earlier assertion.
    should_be_gone = s.root / albums_root / '2017' / '0920 Clones'
    self.assertFalse(should_be_gone.exists())
def test_system_hard_link(self):
    """Album links are hard links with --use-hardlinks and are replaced
    by symlinks when the option is dropped on a re-sync."""
    s = ts.SetupDbAndCredentials()
    args = ["--start-date", "2016-01-01", "--end-date", "2017-01-01",
            "--use-hardlinks", "--album", "Clones😀"]
    s.test_setup("test_system_hard_link", args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    with LocalData(s.root) as db:
        # Total of 4 images
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        self.assertEqual(4, db.cur.fetchone()[0])
    pat = str(albums_root / "*" / "*Clones😀" / "*")
    links: List[Path] = sorted(s.root.glob(pat))
    self.assertEqual(4, len(links))
    for link in links:
        self.assertTrue(not link.is_symlink())

    # verify that switching to soft links in the same folder
    # overwrites all hard links
    args = ["--start-date", "2016-01-01", "--end-date", "2017-01-01",
            "--album", "Clones😀", "--flush-index"]
    s.test_setup("test_system_hard_link", args=args, trash_db=False,
                 trash_files=False)
    s.gp.start(s.parsed_args)
    with LocalData(s.root) as db:
        # Total of 4 images
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        self.assertEqual(4, db.cur.fetchone()[0])
    links = sorted(s.root.glob(pat))
    self.assertEqual(4, len(links))
    for link in links:
        self.assertTrue(link.is_symlink())
def test_new_schema(self):
    """Old DB versions upgrade in place; a newer-than-code version
    makes setup raise ValueError."""
    s = SetupDbAndCredentials()
    # get a single file
    args = ['--drive-file', '20170102_094337.jpg', '--skip-picasa']
    s.test_setup('new_schema', args=args, trash_files=True)
    s.gp.start(s.parsed_args)

    # pretend the store was written by an old release
    db = LocalData(s.root)
    db.cur.execute('UPDATE Globals SET Version = "1.0" WHERE Id IS 1')
    db.store()
    db.con.close()

    s.test_setup('new_schema', args=args)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute('SELECT Version From Globals WHERE Id IS 1')
    self.assertEqual(float(db.cur.fetchone()[0]), LocalData.VERSION)

    # a store newer than the code must be rejected
    db.cur.execute('UPDATE Globals SET Version = "100.0" WHERE Id IS 1')
    db.store()
    with self.assertRaises(ValueError):
        s.test_setup('new_schema', args=args)
def test_new_schema(self):
    """ check that the database initialization errors if the version of
    the data store is newer than the code version
    UPDATE: use --fave so that we do download a photo. A previous bug
    was only picked up when this replaced --skip-files"""
    s = SetupDbAndCredentials()
    # get a single file
    args = ['--favourites-only', '--skip-albums']
    s.test_setup('new_schema', args=args, trash_files=True)
    s.gp.start(s.parsed_args)

    # pretend the store came from an old release; it should upgrade
    db = LocalData(s.root)
    db.cur.execute('UPDATE Globals SET Version = 1.0 WHERE Id IS 1')
    db.store()
    db.con.close()

    s.test_setup('new_schema', args=args)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute('SELECT Version From Globals WHERE Id IS 1')
    self.assertEqual(float(db.cur.fetchone()[0]), LocalData.VERSION)

    # a store newer than the code must be rejected
    db.cur.execute('UPDATE Globals SET Version = 100.0 WHERE Id IS 1')
    db.store()
    with self.assertRaises(ValueError):
        s.test_setup('new_schema', args=args)
def test_no_album_index(self):
    """for issue #89 - photos directly uploaded into albums dont 'list'"""
    s = ts.SetupDbAndCredentials()
    args = ['--no-album-index', '--skip-shared-albums', '--index-only']
    s.test_setup('test_no_album_index', trash_files=True, trash_db=True,
                 args=args)
    s.gp.start(s.parsed_args)

    db = LocalData(s.root)
    # There are 95 items but 10 were uploaded direct into a folder
    # so --no-album-index may affect them (but does not)
    # Also 5 are shared from another account (skipped due to
    # --skip-shared-albums AND --no-album-index)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    # this was an attempt to prove that creating a folder and uploading
    # directly to it in google photos web would reproduce
    # https://github.com/gilesknap/gphotos-sync/issues/89
    # if it had done so then we would only get 80 files
    expected = TestAccount.image_count + TestAccount.video_count
    self.assertEqual(
        expected, db.cur.fetchone()[0],
        "expected {} files with album index off".format(expected))
def test_system_index(self):
    """Index-only run over 2016..2017-09-19; verify table row counts."""
    s = SetupDbAndCredentials()
    args = ['--start-date', '2016-01-01', '--end-date', '2017-09-19',
            '--index-only', '--skip-video']
    s.test_setup('system_index', args=args, trash_db=True)
    s.gp.start(s.parsed_args)

    db = LocalData(s.root)
    # drive files with videos skipped
    # NOTE(review): an older comment said "70 items but 10 are videos
    # = 60", yet 56 is asserted - confirm the current library counts
    db.cur.execute("SELECT COUNT() FROM SyncFiles WHERE MediaType = 0;")
    self.assertEqual(db.cur.fetchone()[0], 56)
    # 4 albums with 26 files 10 are videos = 16
    db.cur.execute("SELECT COUNT() FROM AlbumFiles;")
    self.assertEqual(db.cur.fetchone()[0], 16)
    db.cur.execute("SELECT COUNT() FROM Albums;")
    self.assertEqual(db.cur.fetchone()[0], 4)
    # 16 picasa files from albums but 6 overlap
    db.cur.execute("SELECT COUNT() FROM SyncFiles WHERE MediaType = 1;")
    self.assertEqual(db.cur.fetchone()[0], 10)
def test_system_download_name(self):
    """Download single named files and check they land where expected."""
    s = SetupDbAndCredentials()
    # get a single file
    args = ['--drive-file', 'IMG_20170131_163640444.jpg',
            '--skip-picasa', '--skip-video']
    s.test_setup('system_download_name', args=args, trash_files=True)
    s.gp.start(s.parsed_args)

    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles WHERE MediaType = 0;")
    self.assertEqual(db.cur.fetchone()[0], 1)
    db.cur.execute("SELECT FileName FROM SyncFiles WHERE MediaType = 0;")
    self.assertEqual(db.cur.fetchone()[0], 'IMG_20170131_163640444.jpg')
    expected_file = os.path.join(
        s.root, 'drive/Google Photos/2017/IMG_20170131_163640444.jpg')
    self.assertEqual(True, os.path.exists(expected_file))
    db.con.close()

    # repeat for a second file with --all-drive and a flushed index
    args = ['--drive-file', 'IMG_20170131_163643380.jpg', '--skip-video',
            '--skip-picasa', '--all-drive', '--flush-index']
    s.test_setup('system_download_name', args=args, trash_files=True)
    s.gp.start(s.parsed_args)
    expected_file = os.path.join(
        s.root, 'drive/Google Photos/2017/IMG_20170131_163643380.jpg')
    self.assertEqual(True, os.path.exists(expected_file))
def test_zero_items_in_response(self, page_size):
    """ for issue https://github.com/gilesknap/gphotos-sync/issues/112 """
    # note this fails with page size below 5 and that might be another
    # API bug.
    # to emulate issue #112 remove the date range and set page_size = 2;
    # this then does download everything via media_items.list but
    # sometimes gets zero items with a next_page token (too expensive
    # on quota to always leave it like this.)
    page_size.return_value = 6
    s = ts.SetupDbAndCredentials()
    args = ['--skip-albums', '--index-only',
            '--start-date', '1965-01-01', '--end-date', '1965-12-31']
    s.test_setup('test_zero_items_in_response', trash_files=True,
                 trash_db=True, args=args)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(10, db.cur.fetchone()[0], "expected 10 images 1965")
def test_sys_favourites_and_dates(self):
    """Download favourite images in test library and verify the local
    file timestamps were set from the photo metadata."""
    s = ts.SetupDbAndCredentials()
    args = ['--favourites-only', '--max-retries', '6',
            '--max-threads', '2']
    s.test_setup('test_sys_favourites', args=args, trash_files=True,
                 trash_db=True)
    s.gp.start(s.parsed_args)

    db = LocalData(s.root)
    # exactly one favourite media item in the test library
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(1, db.cur.fetchone()[0])

    name = s.root / 'photos/2017/09/IMG_2117.JPG'
    expected = datetime(2017, 9, 26, 15, 29, 44)
    date = datetime.fromtimestamp(os.path.getmtime(str(name)))
    self.assertEqual(expected, date.replace(microsecond=0),
                     "Modify date not set correctly")
    if os.name == 'nt':
        # creation time is only meaningful on Windows
        date = datetime.fromtimestamp(os.path.getctime(str(name)))
        self.assertEqual(expected, date.replace(microsecond=0),
                         "Create date not set correctly")
def test_system_incremental(self):
    """Incremental indexing: initial range, catch-up scan, full rescan."""
    s = ts.SetupDbAndCredentials()
    args = ["--start-date", TestAccount.start,
            "--end-date", TestAccount.end,
            "--skip-albums", "--index-only"]
    s.test_setup("test_system_incremental", args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)

    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(
        TestAccount.image_count_2016, db.cur.fetchone()[0],
        "expected {} items in 2016".format(TestAccount.image_count_2016))

    # force an update to the 'most recently scanned file' record
    # (this is normally only set for complete scans and was tested in
    # test_sys_whole_library)
    db.set_scan_date(Utils.string_to_date("2017-01-01"))
    db.store()

    s = ts.SetupDbAndCredentials()
    s.test_setup("test_system_incremental",
                 args=["--skip-albums", "--index-only"])
    s.gp.start(s.parsed_args)
    # this should add in everything in 2017 onwards (21 files)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    expected = (TestAccount.image_count_2016 +
                TestAccount.item_count_2017 +
                TestAccount.item_count_2020)
    self.assertEqual(
        expected, db.cur.fetchone()[0],
        "expected file count from 2016 and 2017 to be {}".format(expected))
    self.assertEqual(db.get_scan_date().date(), TestAccount.latest_date)

    s = ts.SetupDbAndCredentials()
    s.test_setup("test_system_incremental",
                 args=["--skip-albums", "--index-only", "--rescan"])
    s.gp.start(s.parsed_args)
    # this should add in everything
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    total = TestAccount.image_count + TestAccount.video_count
    self.assertEqual(
        total, db.cur.fetchone()[0],
        "expected a total of {} items after full sync".format(total))
def test_shared_albums(self):
    """Check AlbumFiles counts with and without shared albums."""
    s = ts.SetupDbAndCredentials()
    s.test_setup("test_shared_albums", args=["--skip-files"],
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)

    expected = (TestAccount.album_image_count +
                TestAccount.album_shared_image_count +
                TestAccount.shared_album_image_count +
                TestAccount.shared_album_shared_image_count)
    with LocalData(s.root) as db:
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        self.assertEqual(
            expected, db.cur.fetchone()[0],
            "expected {} files in all albums including shared".format(
                expected))

    s = ts.SetupDbAndCredentials()
    s.test_setup("test_shared_albums",
                 args=["--skip-files", "--skip-shared-albums"],
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    # note that unless we use --no-album-index the shared files in the
    # visible album will show up here
    expected = (TestAccount.album_image_count +
                TestAccount.album_shared_image_count)
    with LocalData(s.root) as db:
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        self.assertEqual(
            expected, db.cur.fetchone()[0],
            "expected {} files in all albums excluding shared".format(
                expected))
def test_picasa_delete(self):
    """check_for_removed deletes local album files missing from the DB."""
    s = SetupDbAndCredentials()
    args = ['--album', '2Photos', '--skip-drive', '--do-delete']
    s.test_setup('test_picasa_delete', args=args, trash_files=True)
    s.gp.start(s.parsed_args)
    pat = os.path.join(s.root, 'picasa', '2016', '01', '*.*')
    self.assertEqual(2, len(glob.glob(pat)))

    # with the DB intact nothing should be removed
    s.test_setup('test_picasa_delete', args=args)
    s.gp.picasa_sync.check_for_removed()
    self.assertEqual(2, len(glob.glob(pat)))

    # drop the rows; the local files should then be deleted
    db = LocalData(s.root)
    db.cur.execute("DELETE FROM SyncFiles WHERE MediaType = 1;")
    db.store()
    s.gp.picasa_sync.check_for_removed()
    self.assertEqual(0, len(glob.glob(pat)))
def test_base_media(self):
    """Download archived images in test library using flat folders
    (and windows file name restrictions)
    """
    b = BaseMedia()
    # every property on the abstract base must raise.
    # bug fix: the old code did `x = b.url` inside pytest.raises and
    # then `print(x)` - but x was never bound (the property raises
    # before assignment), so print(x) raised NameError.
    for prop in ('size', 'id', 'description', 'orig_name',
                 'create_date', 'modify_date', 'mime_type', 'url'):
        with pytest.raises(NotImplementedError):
            getattr(b, prop)

    s = ts.SetupDbAndCredentials()
    args = [
        "--skip-albums",
        "--start-date",
        "2020-01-01",
        "--use-flat-path",
    ]
    s.test_setup("test_base_media", args=args, trash_files=True,
                 trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # Total of 1 out of media items
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(1, db.cur.fetchone()[0])
    # flat path: year-month folder instead of year/month
    pat = str(photos_root / "2020-04" / "*.*")
    self.assertEqual(1, len(sorted(s.root.glob(pat))))
def test_base_media(self):
    """Abstract BaseMedia properties raise; windows name validation
    passes a clean name through; then download one archived image
    using flat folder paths."""
    b = BaseMedia()
    # every property on the abstract base must raise
    for prop in ('size', 'id', 'description', 'orig_name',
                 'create_date', 'modify_date', 'mime_type', 'url'):
        with pytest.raises(NotImplementedError):
            getattr(b, prop)

    with patch('gphotos.BaseMedia.os_name',
               new_callable=PropertyMock(return_value='nt')):
        assert b.validate_encoding('hello.txt') == 'hello.txt'

    s = ts.SetupDbAndCredentials()
    args = ['--archived', '--skip-albums', '--start-date', '2019-10-01',
            '--use-flat-path']
    s.test_setup('test_base_media', args=args, trash_files=True,
                 trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # exactly one archived media item in range
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(1, db.cur.fetchone()[0])
    pat = str(photos_root / '2019-11' / '*.*')
    self.assertEqual(1, len(sorted(s.root.glob(pat))))
def test_system_skip_video(self):
    """--skip-video halves the indexed item count for 2017."""
    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2017-01-01', '--end-date', '2018-01-01',
            '--skip-albums', '--index-only']
    s.test_setup('test_system_skip_video', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # Total of 20 media items
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(20, db.cur.fetchone()[0])
    db.store()
    del db

    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2017-01-01', '--end-date', '2018-01-01',
            '--skip-albums', '--index-only', '--skip-video']
    s.test_setup('test_system_skip_video', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # Total of 10 media items once videos are skipped
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(10, db.cur.fetchone()[0])
def test_system_match(self):
    """Select files that stress album-reference name matching."""
    s = SetupDbAndCredentials()
    # attempting to select files that have issues with matching in
    # album references
    args = ['--drive-file', 'subaru', '--all-drive', '--skip-picasa',
            '--skip-video']
    s.test_setup('test_system_match', args=args, trash_files=True)
    s.gp.start(s.parsed_args)

    # verify db contents: two matching drive files
    db = LocalData(s.root)
    matches = sum(1 for _ in db.get_files_by_search(media_type=0))
    self.assertEqual(matches, 2)

    # and both are on disk
    expected_files = os.path.join(
        s.root, 'drive/Google Photos/9999/Cars/subaru?.jpg')
    self.assertEqual(len(glob.glob(expected_files)), 2)
def test_sys_album_add_file(self):
    """tests that the album links get re-created in a new folder with a
    new last-date prefix when a recent photo is added to an album,
    also that the old folder is removed
    """
    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2017-09-19', '--end-date', '2017-09-20']
    s.test_setup('test_sys_album_add_file', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    # the date will be picked from the album contents which still
    # includes the file that is not yet downloaded
    pat = str(albums_root / '2017' / '0923 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(3, len(files))

    # spoof the album to pretend it only got 3 files up to 2017-09-20
    db = LocalData(s.root)
    db.cur.execute("UPDATE Albums SET EndDate='2017-09-20',"
                   "Size=3 WHERE "
                   "AlbumName='Clones'")
    db.store()

    args = ['--start-date', '2017-09-19', '--end-date', '2017-09-23',
            '--index-only']
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    # the rescan will reset the date so set it back
    db = LocalData(s.root)
    db.cur.execute("UPDATE Albums SET EndDate='2017-09-20' "
                   "WHERE AlbumName='Clones'")
    db.store()

    args = ['--skip-index', '--skip-files']
    s.test_setup('test_sys_album_add_file', args=args)
    s.gp.start(s.parsed_args)
    pat = str(albums_root / '2017' / '0920 Clones' / '*.*')
    files = sorted(s.root.glob(pat))
    self.assertEqual(4, len(files))
    # bug fix: the old folder must be checked under the sync root - the
    # previous bare relative path was resolved against the CWD, so the
    # assertion passed vacuously regardless of the sync outcome
    self.assertFalse((s.root / albums_root / '2017' /
                      '0923 Clones').is_dir())
def test_skip_video_on_album(self):
    """verify that skip video works when syncing a specific folder"""
    s = ts.SetupDbAndCredentials()
    s.test_setup("test_skip_video_on_album",
                 args=["--skip-video", "--album", "Movies"],
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    with LocalData(s.root) as db:
        db.cur.execute("SELECT COUNT() FROM AlbumFiles")
        self.assertEqual(0, db.cur.fetchone()[0],
                         "expected 0 video files in album Movies")
def test_system_index_movies(self):
    """Index the 'creations' Movies album and check row counts."""
    s = SetupDbAndCredentials()
    # this query gets some 'creations' Movies and a folder containing
    # them
    args = ['--album', 'Movies', '--index-only', '--skip-drive']
    s.test_setup('system_index_movies', args=args, trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles WHERE MediaType = 1;")
    self.assertEqual(db.cur.fetchone()[0], 10)
    db.cur.execute("SELECT COUNT() FROM AlbumFiles;")
    self.assertEqual(db.cur.fetchone()[0], 10)
def test_max_retries_hit(self):
    """Sync completes with expected DB rows and on-disk file count."""
    s = ts.SetupDbAndCredentials()
    s.test_setup("test_max_retries_hit", args=["--skip-albums"],
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(90, db.cur.fetchone()[0])
    pat = str(photos_root / "*" / "*" / "*")
    self.assertEqual(9, len(sorted(s.root.glob(pat))),
                     "mismatch on image file count")
def test_sys_favourites(self):
    """Download favourite images in test library."""
    s = ts.SetupDbAndCredentials()
    s.test_setup('test_sys_favourites',
                 args=['--favourites-only', '--skip-albums'],
                 trash_files=True, trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # exactly one favourite media item
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(1, db.cur.fetchone()[0])
def test_system_download_album(self):
    """Download a small album and check DB rows and on-disk layout."""
    s = SetupDbAndCredentials()
    # get a small Album
    args = ['--album', '2Photos', '--skip-drive']
    s.test_setup('system_download_album', args=args, trash_files=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles WHERE MediaType = 1;")
    self.assertEqual(db.cur.fetchone()[0], 2)
    expected_file = os.path.join(s.root, 'albums', '2016', '0109 2Photos')
    self.assertEqual(True, os.path.exists(expected_file))
    pat = os.path.join(s.root, 'picasa', '2016', '01', '*.*')
    self.assertEqual(2, len(glob.glob(pat)))
def test_sys_archived(self):
    """Download archived images in test library."""
    s = ts.SetupDbAndCredentials()
    args = ["--archived", "--skip-albums", "--start-date", "2019-10-01"]
    s.test_setup("test_sys_archived", args=args, trash_files=True,
                 trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # a single archived item is expected in this range
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(1, db.cur.fetchone()[0])
def download_faves(self, expected=4, no_response=False, trash=True):
    """Helper: sync the Clones album and assert the SyncFiles count."""
    s = ts.SetupDbAndCredentials()
    args = ['--album', 'Clones', '--use-flat-path', '--omit-album-date',
            '--rescan']
    s.test_setup('test_google_albums_sync', args=args,
                 trash_files=trash, trash_db=trash)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(expected, db.cur.fetchone()[0])
def test_system_incremental(self):
    """Incremental index: 2016 range, catch-up scan, then --rescan."""
    s = ts.SetupDbAndCredentials()
    args = ['--start-date', '2016-01-01', '--end-date', '2017-01-01',
            '--skip-albums', '--index-only']
    s.test_setup('test_system_incremental', args=args, trash_db=True,
                 trash_files=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(10, db.cur.fetchone()[0])

    # force an update the 'most recently scanned file' record
    # (this is normally only set for complete scans and was tested in
    # test_sys_whole_library)
    db.set_scan_date(Utils.string_to_date("2017-01-01"))
    db.store()

    s = ts.SetupDbAndCredentials()
    s.test_setup('test_system_incremental',
                 args=['--skip-albums', '--index-only'])
    s.gp.start(s.parsed_args)
    # this should add in everything in 2017 (20 files)
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(30, db.cur.fetchone()[0])
    self.assertEqual(db.get_scan_date().date(),
                     datetime.date(2017, 9, 26))

    s = ts.SetupDbAndCredentials()
    s.test_setup('test_system_incremental',
                 args=['--skip-albums', '--index-only', '--rescan'])
    s.gp.start(s.parsed_args)
    # this should add in everything
    db.cur.execute("SELECT COUNT() FROM SyncFiles")
    self.assertEqual(80, db.cur.fetchone()[0])
def test_system_index_picasa(self):
    """Index-only picasa run; verify album file counts."""
    s = SetupDbAndCredentials()
    # todo fix for more useful dates when search by create date available
    args = ['--start-date', '2016-01-01', '--end-date', '2017-09-19',
            '--skip-drive', '--index-only', '--skip-video']
    s.test_setup('system_index_picasa', args=args, trash_db=True)
    s.gp.start(s.parsed_args)
    db = LocalData(s.root)
    # album files: 4 albums with 26 files, 6 overlap and 10 are
    # videos = 10
    db.cur.execute("SELECT COUNT() FROM SyncFiles WHERE MediaType = 1;")
    self.assertEqual(db.cur.fetchone()[0], 10)
    # album files includes the overlaps = 16
    db.cur.execute("SELECT COUNT() FROM AlbumFiles;")
    self.assertEqual(db.cur.fetchone()[0], 16)