async def test_update_ignore(reader: ReaderHelper, time: FakeTime, coord: Coordinator, config: Config, supervisor: SimulatedSupervisor, ha: HaSource, drive: DriveSource):
    """The "ignore" endpoint should update a backup's ignored flag and re-sync it to Drive."""
    config.override(Setting.IGNORE_UPGRADE_BACKUPS, True)
    config.override(Setting.DAYS_BETWEEN_BACKUPS, 0)

    # make an ignored_backup
    slug = await supervisor.createBackup({
        'name': "Ignore_me",
        'folders': ['homeassistant'],
        'addons': []
    }, date=time.now())

    await coord.sync()
    # While ignored, the backup is visible in Home Assistant but never uploaded to Drive.
    assert len(await drive.get()) == 0
    assert len(await ha.get()) == 1
    assert len(coord.backups()) == 1

    # Un-ignore the backup through the API, then wait for the follow-up sync.
    # (NOTE: a previous comment here said "Disable Drive Upload", which was wrong.)
    update = {
        "ignore": False,
        "slug": slug,
    }
    await reader.postjson("ignore", json=update)
    await coord.waitForSyncToFinish()

    # Once no longer ignored, the backup gets uploaded to Drive.
    assert len(coord.backups()) == 1
    assert len(await drive.get()) == 1
    assert len(await ha.get()) == 1
async def test_update_disable_drive(reader: ReaderHelper, server, coord: Coordinator, config: Config, drive_requests: DriveRequests):
    """Turning Drive upload off should re-enable the addon even without Drive credentials."""
    # Strip the Drive credentials so the coordinator starts out disabled.
    drive_requests.creds = None
    os.remove(config.get(Setting.CREDENTIALS_FILE_PATH))
    assert not coord.enabled()
    await coord.sync()
    assert not coord.backups()

    # Save a config that switches Drive upload off.
    payload = {
        "config": {
            Setting.ENABLE_DRIVE_UPLOAD.value: False
        },
        "backup_folder": ""
    }
    reply = await reader.postjson("saveconfig", json=payload)
    assert reply == {'message': 'Settings saved', "reload_page": True}
    assert config.get(Setting.ENABLE_DRIVE_UPLOAD) is False

    # With upload disabled, Drive credentials aren't needed and syncing works again.
    assert coord.enabled()
    await coord.waitForSyncToFinish()
    assert len(coord.backups()) == 1
async def test_sync_endpoint(reader, ui_server, coord: Coordinator, time: FakeTime, session):
    """The "sync" endpoint should trigger a sync and report the resulting backups.

    Renamed from ``test_sync``: this module defines another ``test_sync`` further
    down, which shadowed this one at import time so it was never collected or run.
    """
    assert len(coord.backups()) == 0

    # Hitting the endpoint performs a sync and returns the same payload as "getstatus".
    status = await reader.getjson("sync")
    assert len(coord.backups()) == 1
    assert status == await reader.getjson("getstatus")

    # After enough time passes, another sync produces a second backup.
    time.advance(days=7)
    assert len((await reader.getjson("sync"))['backups']) == 2
async def test_new_backup(coord: Coordinator, time: FakeTime, source, dest):
    """A freshly requested backup should exist only in the source, not the destination."""
    await coord.startBackup(CreateOptions(time.now(), "Test Name"))

    created = coord.backups()
    assert len(created) == 1
    newest = created[0]
    assert newest.name() == "Test Name"
    # The backup was just created locally, so it hasn't reached the destination yet.
    assert newest.getSource(source.name()) is not None
    assert newest.getSource(dest.name()) is None
async def test_delete(coord: Coordinator, backup, source, dest):
    """Deleting a backup from each source in turn should remove it everywhere."""
    slug = backup.slug()
    assert backup.getSource(source.name()) is not None
    assert backup.getSource(dest.name()) is not None

    # Remove the source copy; the destination copy keeps the backup alive.
    await coord.delete([source.name()], slug)
    assert len(coord.backups()) == 1
    assert backup.getSource(source.name()) is None
    assert backup.getSource(dest.name()) is not None

    # Removing the last remaining copy deletes the backup entirely.
    await coord.delete([dest.name()], slug)
    assert backup.getSource(source.name()) is None
    assert backup.getSource(dest.name()) is None
    assert backup.isDeleted()
    assert len(coord.backups()) == 0

    # A sync recreates a backup, which can then be deleted from both places at once.
    await coord.sync()
    assert len(coord.backups()) == 1
    await coord.delete([source.name(), dest.name()], coord.backups()[0].slug())
    assert len(coord.backups()) == 0
async def test_check_size_sync(coord: Coordinator, source: HelperTestSource, dest: HelperTestSource, time, fs: FsFaker, global_info: GlobalInfo):
    """Syncs should fail with no free disk space, unless the one-time skip flag is set."""
    skipForWindows()
    # Simulate a full disk: every sync attempt should error out and create nothing.
    fs.setFreeBytes(0)
    await coord.sync()
    assert len(coord.backups()) == 0
    assert global_info._last_error is not None

    await coord.sync()
    assert len(coord.backups()) == 0
    assert global_info._last_error is not None

    # Verify it resets the global size skip check, but gets through once
    global_info.setSkipSpaceCheckOnce(True)
    await coord.sync()
    assert len(coord.backups()) == 1
    assert global_info._last_error is None
    assert not global_info.isSkipSpaceCheckOnce()

    # Next attempt to backup should fail again.
    time.advance(days=7)
    await coord.sync()
    assert len(coord.backups()) == 1
    assert global_info._last_error is not None
async def test_disabled_at_install(coord: Coordinator, dest, time):
    """
    Regression test: with backups already present and the destination disabled,
    the addon must not keep trying to sync over and over right after install.
    """
    dest.setEnabled(True)
    await coord.sync()
    assert len(coord.backups()) == 1

    # Disable the destination and jump forward; one sync should clear the
    # pending-work flag instead of leaving it set indefinitely.
    dest.setEnabled(False)
    time.advance(days=5)
    assert coord.check()
    await coord.sync()
    assert not coord.check()
async def test_backup_now(reader, ui_server, time: FakeTime, backup: Backup, coord: Coordinator):
    """The "backup" endpoint should create backups honoring the retain_drive/retain_ha flags."""
    def local_now():
        # The UI reports backup dates as local time formatted with strftime("%c").
        return time.toLocal(time.now()).strftime("%c")

    assert len(coord.backups()) == 1
    assert (await reader.getjson("getstatus"))["backups"][0]["date"] == local_now()

    # Retain nothing: the new backup exists in a single source and isn't retained.
    time.advance(hours=1)
    reply = await reader.getjson("backup?custom_name=TestName&retain_drive=False&retain_ha=False")
    assert reply == {'message': "Requested backup 'TestName'"}
    status = await reader.getjson('getstatus')
    assert len(status["backups"]) == 2
    newest = status["backups"][1]
    assert newest["date"] == local_now()
    assert newest["name"] == "TestName"
    assert newest['sources'][0]['retained'] is False
    assert len(newest['sources']) == 1

    # Retain in Drive only.
    time.advance(hours=1)
    reply = await reader.getjson("backup?custom_name=TestName2&retain_drive=True&retain_ha=False")
    assert reply == {'message': "Requested backup 'TestName2'"}
    await coord.sync()
    status = await reader.getjson('getstatus')
    assert len(status["backups"]) == 3
    newest = status["backups"][2]
    assert newest["date"] == local_now()
    assert newest["name"] == "TestName2"
    assert newest['sources'][0]['retained'] is False
    assert newest['sources'][1]['retained'] is True

    # Retain in Home Assistant only.
    time.advance(hours=1)
    reply = await reader.getjson("backup?custom_name=TestName3&retain_drive=False&retain_ha=True")
    assert reply == {'message': "Requested backup 'TestName3'"}
    await coord.sync()
    status = await reader.getjson('getstatus')
    assert len(status["backups"]) == 4
    newest = status["backups"][3]
    assert newest['sources'][0]['retained'] is True
    assert newest['sources'][1]['retained'] is False
    assert newest["date"] == local_now()
    assert newest["name"] == "TestName3"
async def test_sync(coord: Coordinator, global_info: GlobalInfo, time: FakeTime):
    """A single sync should record one attempt, one success, and one backup."""
    await coord.sync()

    # Exactly one sync was attempted and it succeeded.
    assert (global_info._syncs, global_info._successes) == (1, 1)
    # The recorded start time matches the fake clock (time doesn't advance here).
    assert global_info._last_sync_start == time.now()
    assert len(coord.backups()) == 1
async def test_only_source_configured(coord: Coordinator, dest: HelperTestSource, time, source: HelperTestSource):
    """Syncing should still create a backup when only the source side is configured."""
    # Only the source is live; the destination is off and doesn't demand configuration.
    source.setEnabled(True)
    dest.setEnabled(False)
    dest.setNeedsConfiguration(False)

    await coord.sync()
    assert len(coord.backups()) == 1
async def test_freshness(coord: Coordinator, source: HelperTestSource, dest: HelperTestSource, backup: Backup, time: FakeTime):
    """Purge flags should track each side's max-backup limit and refresh on delete/retain/upload."""
    source.setMax(2)
    dest.setMax(2)
    await coord.sync()
    # Within limits on both sides: nothing is eligible for purging.
    assert backup.getPurges() == {
        source.name(): False,
        dest.name(): False
    }

    source.setMax(1)
    dest.setMax(1)
    await coord.sync()
    # At the limit on both sides: the backup becomes purge-eligible everywhere.
    assert backup.getPurges() == {
        source.name(): True,
        dest.name(): True
    }

    dest.setMax(0)
    await coord.sync()
    # A max of 0 appears to turn the purge check off for that side — note the
    # flag flips back to False rather than True.
    assert backup.getPurges() == {
        source.name(): True,
        dest.name(): False
    }

    source.setMax(0)
    await coord.sync()
    assert backup.getPurges() == {
        source.name(): False,
        dest.name(): False
    }

    source.setMax(2)
    dest.setMax(2)
    time.advance(days=7)
    await coord.sync()
    assert len(coord.backups()) == 2
    # With two backups at max=2, the older one (the `backup` fixture) is the
    # purge candidate on both sides; the newer one is safe.
    assert backup.getPurges() == {
        source.name(): True,
        dest.name(): True
    }
    assert coord.backups()[1].getPurges() == {
        source.name(): False,
        dest.name(): False
    }

    # should refresh on delete
    source.setMax(1)
    dest.setMax(1)
    await coord.delete([source.name()], backup.slug())
    assert coord.backups()[0].getPurges() == {
        dest.name(): True
    }
    assert coord.backups()[1].getPurges() == {
        source.name(): True,
        dest.name(): False
    }

    # should update on retain
    await coord.retain({dest.name(): True}, backup.slug())
    assert coord.backups()[0].getPurges() == {
        dest.name(): False
    }
    assert coord.backups()[1].getPurges() == {
        source.name(): True,
        dest.name(): True
    }

    # should update on upload
    await coord.uploadBackups(coord.backups()[0].slug())
    assert coord.backups()[0].getPurges() == {
        dest.name(): False,
        source.name(): True
    }
    assert coord.backups()[1].getPurges() == {
        source.name(): False,
        dest.name(): True
    }