async def test_backup_supervisor_path(ha: HaSource, supervisor: SimulatedSupervisor, interceptor: RequestInterceptor):
    """Verify that a supervisor at version 2021.7 is still queried through the legacy /snapshots endpoints, not /backups."""
    # Pin the simulated supervisor to the version under test before querying.
    supervisor._super_version = Version(2021, 7)
    await ha.get()
    # Only the legacy snapshot URL should have been hit.
    assert not interceptor.urlWasCalled(URL_MATCH_BACKUPS)
    assert interceptor.urlWasCalled(URL_MATCH_SNAPSHOT)
async def test_resume_session_abandoned_after_a_long_time(
        time: FakeTime, drive: DriveSource, config: Config,
        server: SimulationServer, snapshot_helper,
        interceptor: RequestInterceptor):
    """A resumable upload session is reused across retries but abandoned once it expires."""
    backup, stream = await snapshot_helper.createFile()

    # Let the first chunk through, then fail the upload with a 501.
    interceptor.setError(URL_MATCH_UPLOAD_PROGRESS, 501, 1)
    with pytest.raises(ClientResponseError):
        await drive.save(backup, stream)

    # A session was started and its state retained for a later retry.
    assert server.wasUrlRequested(URL_START_UPLOAD)
    assert drive.drivebackend.last_attempt_count == 1
    assert drive.drivebackend.last_attempt_location is not None
    assert drive.drivebackend.last_attempt_metadata is not None

    # A second failed attempt keeps reusing the same session.
    stream.position(0)
    with pytest.raises(ClientResponseError):
        await drive.save(backup, stream)
    assert drive.drivebackend.last_attempt_count == 2
    assert drive.drivebackend.last_attempt_location is not None
    assert drive.drivebackend.last_attempt_metadata is not None
    stale_location = drive.drivebackend.last_attempt_location

    # Jump past the session expiration window, then verify a brand new
    # session is started rather than resuming the stale one.
    server.urls.clear()
    interceptor.clear()
    time.advance(duration=UPLOAD_SESSION_EXPIRATION_DURATION)
    stream.position(0)
    await drive.save(backup, stream)
    assert interceptor.urlWasCalled(URL_START_UPLOAD)
    assert not interceptor.urlWasCalled(stale_location)
async def test_start_on_boot(ha: HaSource, time, interceptor: RequestInterceptor,
                             config: Config, supervisor: SimulatedSupervisor) -> None:
    """Addons marked boot=True restart after a snapshot; boot=False addons stay stopped, with no explicit start/stop API calls."""
    with_boot = "boot_slug"
    supervisor.installAddon(with_boot, "Start on boot", boot=True, started=False)
    without_boot = "no_boot_slug"
    supervisor.installAddon(without_boot, "Don't start on boot", boot=False, started=False)
    config.override(Setting.STOP_ADDONS, ",".join([with_boot, without_boot]))
    config.override(Setting.NEW_SNAPSHOT_TIMEOUT_SECONDS, 0.001)

    # Both addons begin stopped.
    assert supervisor.addon(with_boot)["state"] == "stopped"
    assert supervisor.addon(without_boot)["state"] == "stopped"

    # Hold the snapshot lock so the pending snapshot can't complete yet;
    # nothing should change state while the snapshot is in flight.
    async with supervisor._snapshot_inner_lock:
        await ha.create(CreateOptions(time.now(), "Test Name"))
        assert supervisor.addon(with_boot)["state"] == "stopped"
        assert supervisor.addon(without_boot)["state"] == "stopped"

    # Once the snapshot finishes, only the boot=True addon comes back up.
    await ha._pending_snapshot_task
    assert supervisor.addon(with_boot)["state"] == "started"
    assert supervisor.addon(without_boot)["state"] == "stopped"
    assert len(await ha.get()) == 1

    # Neither the start nor the stop endpoint should have been called directly.
    assert not interceptor.urlWasCalled(URL_MATCH_START_ADDON)
    assert not interceptor.urlWasCalled(URL_MATCH_STOP_ADDON)
async def test_chunk_upload_resets_attempt_counter(time: FakeTime, drive: DriveSource, config: Config, server: SimulationServer, backup_helper: BackupHelper, interceptor: RequestInterceptor):
    """A successfully uploaded chunk resets the retry attempt counter for the session."""
    backup, stream = await backup_helper.createFile(size=1024 * 1024 * 10)

    # Allow one progress request, then fail with a 501.
    interceptor.setError(URL_MATCH_UPLOAD_PROGRESS, 501, 1)
    with pytest.raises(ClientResponseError):
        await drive.save(backup, stream)
    stream.position(0)
    with pytest.raises(ClientResponseError):
        await drive.save(backup, stream)

    # The session was started and two attempts were recorded against it.
    assert interceptor.urlWasCalled(URL_START_UPLOAD)
    assert interceptor.urlWasCalled(URL_MATCH_UPLOAD_PROGRESS)
    assert drive.drivebackend.last_attempt_count == 2
    session_url = drive.drivebackend.last_attempt_location
    assert session_url is not None

    # Now permit one additional chunk to land before failing again.
    interceptor.clear()
    interceptor.setError(URL_MATCH_UPLOAD_PROGRESS, 501, 2)
    stream.position(0)
    with pytest.raises(ClientResponseError):
        await drive.save(backup, stream)

    # The existing session was resumed (no new session start) and the
    # progress on the chunk reset the attempt counter back to 1.
    assert not interceptor.urlWasCalled(URL_START_UPLOAD)
    assert interceptor.urlWasCalled(URL_MATCH_UPLOAD_PROGRESS)
    assert interceptor.urlWasCalled(URL(session_url).path)
    assert drive.drivebackend.last_attempt_count == 1
    assert drive.drivebackend.last_attempt_location == session_url
async def test_upgrade_all_config(ha: HaSource, supervisor: SimulatedSupervisor, interceptor: RequestInterceptor, config: Config, server_url): """Verify that converting all upgradeable config optiosn works as expected""" # overwrite the addon options with old values supervisor._options = { Setting.DEPRECTAED_MAX_BACKUPS_IN_HA.value: 1, Setting.DEPRECTAED_MAX_BACKUPS_IN_GOOGLE_DRIVE.value: 2, Setting.DEPRECATED_DAYS_BETWEEN_BACKUPS.value: 5, Setting.DEPRECTAED_IGNORE_OTHER_BACKUPS.value: True, Setting.DEPRECTAED_IGNORE_UPGRADE_BACKUPS.value: True, Setting.DEPRECTAED_BACKUP_TIME_OF_DAY.value: "01:11", Setting.DEPRECTAED_DELETE_BEFORE_NEW_BACKUP.value: True, Setting.DEPRECTAED_BACKUP_NAME.value: "test", Setting.DEPRECTAED_SPECIFY_BACKUP_FOLDER.value: True, Setting.DEPRECTAED_NOTIFY_FOR_STALE_BACKUPS.value: False, Setting.DEPRECTAED_ENABLE_BACKUP_STALE_SENSOR.value: False, Setting.DEPRECTAED_ENABLE_BACKUP_STATE_SENSOR.value: False, Setting.DEPRECATED_BACKUP_PASSWORD.value: "test password", } await ha.init() assert not config.mustSaveUpgradeChanges() assert interceptor.urlWasCalled(URL_MATCH_SELF_OPTIONS) # Verify the config was upgraded assert supervisor._options == { Setting.MAX_BACKUPS_IN_HA.value: 1, Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE.value: 2, Setting.DAYS_BETWEEN_BACKUPS.value: 5, Setting.IGNORE_OTHER_BACKUPS.value: True, Setting.IGNORE_UPGRADE_BACKUPS.value: True, Setting.BACKUP_TIME_OF_DAY.value: "01:11", Setting.DELETE_BEFORE_NEW_BACKUP.value: True, Setting.BACKUP_NAME.value: "test", Setting.SPECIFY_BACKUP_FOLDER.value: True, Setting.NOTIFY_FOR_STALE_BACKUPS.value: False, Setting.ENABLE_BACKUP_STALE_SENSOR.value: False, Setting.ENABLE_BACKUP_STATE_SENSOR.value: False, Setting.BACKUP_PASSWORD.value: "test password", Setting.CALL_BACKUP_SNAPSHOT.value: True, } interceptor.clear() await ha.init() assert not interceptor.urlWasCalled(URL_MATCH_SELF_OPTIONS)
async def test_upgrade_no_config(ha: HaSource, supervisor: SimulatedSupervisor, interceptor: RequestInterceptor, config: Config, server_url): """Verifies that config not in need of an upgrade doesn't get upgraded""" # overwrite the addon options with old values supervisor._options = { Setting.MAX_BACKUPS_IN_HA.value: 4, Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE.value: 4, Setting.DAYS_BETWEEN_BACKUPS.value: 3, Setting.BACKUP_TIME_OF_DAY.value: "01:11", Setting.EXCLUDE_ADDONS.value: "test" } await ha.init() assert not config.mustSaveUpgradeChanges() assert not interceptor.urlWasCalled(URL_MATCH_SELF_OPTIONS) # Verify the config was upgraded assert supervisor._options == { Setting.MAX_BACKUPS_IN_HA.value: 4, Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE.value: 4, Setting.DAYS_BETWEEN_BACKUPS.value: 3, Setting.BACKUP_TIME_OF_DAY.value: "01:11", Setting.EXCLUDE_ADDONS.value: "test", }
async def test_upgrade_some_config(ha: HaSource, supervisor: SimulatedSupervisor, interceptor: RequestInterceptor, config: Config, server_url): """Verify that converting a mix of upgradeable and not upgradeable config works""" # overwrite the addon options with old values supervisor._options = { Setting.DEPRECTAED_MAX_BACKUPS_IN_HA.value: 4, Setting.DEPRECTAED_MAX_BACKUPS_IN_GOOGLE_DRIVE.value: 4, Setting.DEPRECATED_DAYS_BETWEEN_BACKUPS.value: 3, Setting.DEPRECTAED_BACKUP_TIME_OF_DAY.value: "01:11", Setting.EXCLUDE_ADDONS.value: "test", Setting.USE_SSL.value: False, } await ha.init() assert not config.mustSaveUpgradeChanges() assert interceptor.urlWasCalled(URL_MATCH_SELF_OPTIONS) # Verify the config was upgraded assert supervisor._options == { Setting.MAX_BACKUPS_IN_HA.value: 4, Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE.value: 4, Setting.DAYS_BETWEEN_BACKUPS.value: 3, Setting.EXCLUDE_ADDONS.value: "test", Setting.BACKUP_TIME_OF_DAY.value: "01:11", Setting.CALL_BACKUP_SNAPSHOT.value: True, }
async def test_upgrade_default_config(ha: HaSource, supervisor: SimulatedSupervisor, interceptor: RequestInterceptor, config: Config, server_url): """Verify that converting the original default config optiosn works as expected""" # overwrite the addon options with old values supervisor._options = { Setting.DEPRECTAED_MAX_BACKUPS_IN_HA.value: 4, Setting.DEPRECTAED_MAX_BACKUPS_IN_GOOGLE_DRIVE.value: 4, Setting.DEPRECATED_DAYS_BETWEEN_BACKUPS.value: 3, Setting.USE_SSL.value: False, } await ha.init() assert not config.mustSaveUpgradeChanges() assert interceptor.urlWasCalled(URL_MATCH_SELF_OPTIONS) # Verify the config was upgraded assert supervisor._options == { Setting.MAX_BACKUPS_IN_HA.value: 4, Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE.value: 4, Setting.DAYS_BETWEEN_BACKUPS.value: 3, Setting.CALL_BACKUP_SNAPSHOT.value: True, }
async def test_ingore_self_when_stopping(
        ha: HaSource, time, interceptor: RequestInterceptor,
        config: Config, supervisor: SimulatedSupervisor) -> None:
    """The addon never stops or starts itself even when listed in STOP_ADDONS."""
    self_slug = supervisor._addon_slug
    config.override(Setting.STOP_ADDONS, self_slug)
    config.override(Setting.NEW_SNAPSHOT_TIMEOUT_SECONDS, 0.001)
    # If a start call did slip through, make it fail loudly.
    interceptor.setError(URL_MATCH_START_ADDON, 400)

    assert supervisor.addon(self_slug)["state"] == "started"
    # Hold the snapshot lock so the snapshot stays in flight; the addon
    # must remain running throughout.
    async with supervisor._snapshot_inner_lock:
        await ha.create(CreateOptions(time.now(), "Test Name"))
        assert supervisor.addon(self_slug)["state"] == "started"
    await ha._pending_snapshot_task
    assert supervisor.addon(self_slug)["state"] == "started"

    # No start/stop requests should have been made at all.
    assert not interceptor.urlWasCalled(URL_MATCH_START_ADDON)
    assert not interceptor.urlWasCalled(URL_MATCH_STOP_ADDON)
    assert len(await ha.get()) == 1
async def test_do_nothing_while_snapshotting(
        supervisor: SimulatedSupervisor, addon_stopper: AddonStopper,
        config: Config, interceptor: RequestInterceptor) -> None:
    """While a snapshot is in progress, check() must not start or stop any addons."""
    first = "test_slug_1"
    supervisor.installAddon(first, "Test decription")
    second = "test_slug_2"
    supervisor.installAddon(second, "Test decription")
    config.override(Setting.STOP_ADDONS, ",".join([first, second]))

    await addon_stopper.start(False)
    addon_stopper.allowRun()
    addon_stopper.isSnapshotting(True)
    # Both addons are queued to be restarted later...
    assert addon_stopper.must_start == {first, second}
    # ...but check() must stay hands-off while snapshotting.
    await addon_stopper.check()
    assert not interceptor.urlWasCalled(URL_MATCH_START_ADDON)
    assert not interceptor.urlWasCalled(URL_MATCH_STOP_ADDON)
async def test_old_delete_path(ha: HaSource, supervisor: SimulatedSupervisor, interceptor: RequestInterceptor, time: FakeTime):
    """Older supervisors must be asked to delete via the legacy /snapshots/{slug}/remove path."""
    supervisor._super_version = Version(2020, 8)
    await ha.get()
    snapshot: HABackup = await ha.create(CreateOptions(time.now(), "Test Name"))
    # Wrap the HA backup in a composite backup so delete() routes through the source.
    wrapper = DummyBackup(snapshot.name(), snapshot.date(), snapshot.size(), snapshot.slug(), "dummy")
    wrapper.addSource(snapshot)
    await ha.delete(wrapper)
    assert interceptor.urlWasCalled("/snapshots/{0}/remove".format(snapshot.slug()))
async def test_get_info_failure_on_stop(
        supervisor: SimulatedSupervisor, addon_stopper: AddonStopper,
        config: Config, interceptor: RequestInterceptor) -> None:
    """If the addon-info query fails while stopping, the addon is left alone and nothing is persisted."""
    addon_slug = "test_slug_1"
    supervisor.installAddon(addon_slug, "Test decription")
    config.override(Setting.STOP_ADDONS, addon_slug)
    addon_stopper.allowRun()
    addon_stopper.must_start = set()
    assert supervisor.addon(addon_slug)["state"] == "started"

    # Make the info endpoint fail, then attempt to stop.
    interceptor.setError(URL_MATCH_ADDON_INFO, 400)
    await addon_stopper.stopAddons("ignore")

    # The info endpoint was attempted, but no stop state was recorded and
    # the addon keeps running.
    assert interceptor.urlWasCalled(URL_MATCH_ADDON_INFO)
    assert getSaved(config) == (set(), set())
    assert supervisor.addon(addon_slug)["state"] == "started"

    # Subsequent check/start cycles remain a no-op.
    await addon_stopper.check()
    await addon_stopper.startAddons()
    assert supervisor.addon(addon_slug)["state"] == "started"
    assert getSaved(config) == (set(), set())
async def test_resume_session_reused_on_http408(time, drive: DriveSource, config: Config, server: SimulationServer, backup_helper: BackupHelper, interceptor: RequestInterceptor):
    """An HTTP 408 timeout leaves the upload session resumable on the next attempt."""
    backup, stream = await backup_helper.createFile()

    # Make the progress endpoint time out with a 408.
    interceptor.setError(URL_MATCH_UPLOAD_PROGRESS, 408)
    with pytest.raises(GoogleTimeoutError):
        await drive.save(backup, stream)

    # The upload session was started and its resumable URL retained.
    assert server.wasUrlRequested(URL_START_UPLOAD)
    resume_url = drive.drivebackend.last_attempt_location
    assert resume_url is not None

    # Retry after clearing history: the retained session should be resumed.
    server.urls.clear()
    interceptor.clear()
    stream.position(0)
    await drive.save(backup, stream)
    assert interceptor.urlWasCalled(URL(resume_url).path)
async def test_start_failure(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, interceptor: RequestInterceptor, time: FakeTime) -> None:
    """A failed start request still clears the persisted must-start set, leaving the addon stopped."""
    addon_slug = "test_slug_1"
    supervisor.installAddon(addon_slug, "Test decription")
    config.override(Setting.STOP_ADDONS, ",".join([addon_slug]))
    addon_stopper.allowRun()
    addon_stopper.must_start = set()
    assert supervisor.addon(addon_slug)["state"] == "started"

    # Stop the addon and verify it is persisted as needing a restart.
    await addon_stopper.stopAddons("ignore")
    assert supervisor.addon(addon_slug)["state"] == "stopped"
    await addon_stopper.check()
    assert getSaved(config) == ({addon_slug}, set())
    assert supervisor.addon(addon_slug)["state"] == "stopped"

    # Force the start call to fail; the saved state is cleared anyway and
    # the addon remains stopped.
    interceptor.setError(URL_MATCH_START_ADDON, 400)
    await addon_stopper.startAddons()
    assert getSaved(config) == (set(), set())
    assert interceptor.urlWasCalled(URL_MATCH_START_ADDON)
    assert supervisor.addon(addon_slug)["state"] == "stopped"