async def test_snapshot_to_backup_upgrade_use_old_values(
        reader: ReaderHelper, time: FakeTime, coord: Coordinator,
        config: Config, supervisor: SimulatedSupervisor, ha: HaSource,
        drive: DriveSource, data_cache: DataCache, updater: HaUpdater):
    """ Test the path where a user upgrades from the addon before the backup rename and then chooses to use the old names"""
    # Before the upgrade is simulated, no rename warning should be shown.
    status = await reader.getjson("getstatus")
    assert not status["warn_backup_upgrade"]

    # simulate upgrading config: presence of the deprecated setting marks a
    # pre-rename install, and syncing should migrate it to CALL_BACKUP_SNAPSHOT.
    supervisor._options = {Setting.DEPRECTAED_MAX_BACKUPS_IN_HA.value: 7}
    await coord.sync()
    assert Setting.CALL_BACKUP_SNAPSHOT.value in supervisor._options
    assert config.get(Setting.CALL_BACKUP_SNAPSHOT)

    # The upgrade warning is now active but the user hasn't been notified yet.
    status = await reader.getjson("getstatus")
    assert status["warn_backup_upgrade"]
    assert not data_cache.checkFlag(UpgradeFlags.NOTIFIED_ABOUT_BACKUP_RENAME)
    assert not updater._trigger_once

    # simulate the user clicking the button to keep the old "snapshot" names
    # (switch=false: don't switch to the new "backup" terminology, so
    # CALL_BACKUP_SNAPSHOT stays enabled — asserted below)
    assert await reader.getjson("callbackupsnapshot?switch=false") == {
        'message': 'Configuration updated'
    }
    assert data_cache.checkFlag(UpgradeFlags.NOTIFIED_ABOUT_BACKUP_RENAME)
    status = await reader.getjson("getstatus")
    assert not status["warn_backup_upgrade"]
    assert config.get(Setting.CALL_BACKUP_SNAPSHOT)
async def test_bump_last_seen(ha: HaSource, time: Time, config: Config,
                              supervisor: SimulatedSupervisor, model: Model,
                              interceptor, data_cache: DataCache):
    """Every query of HA backups should refresh the cached "last seen" stamp."""
    backup = await ha.create(CreateOptions(time.now(), "Test Name"))

    # Two rounds: each day, fetching the backup list must bump KEY_LAST_SEEN
    # to the current time for any backup still present.
    for _ in range(2):
        time.advance(days=1)
        listed = await ha.get()
        assert backup.slug() in listed
        cached = data_cache.backup(backup.slug())
        assert cached[KEY_LAST_SEEN] == time.now().isoformat()
async def test_snapshot_expiration(config: Config, time: Time) -> None:
    """Saving the cache should drop snapshot entries unseen past the expiration window."""
    cache = DataCache(config, time)
    assert not cache.snapshots

    fresh_stamp = time.now().isoformat()
    stale_stamp = (time.now() -
                   timedelta(days=CACHE_EXPIRATION_DAYS + 1)).isoformat()
    cache.snapshot("new")[KEY_LAST_SEEN] = fresh_stamp
    cache.snapshot("old")[KEY_LAST_SEEN] = stale_stamp
    cache.makeDirty()
    cache.saveIfDirty()

    # Only the entry seen within the expiration window survives the save.
    assert len(cache.snapshots) == 1
    assert "new" in cache.snapshots
    assert "old" not in cache.snapshots
# Example #4
async def test_name_wrong_match_pending_snapshot(
        ha: HaSource, time: Time, config: Config,
        supervisor: SimulatedSupervisor, model: Model, interceptor,
        data_cache: DataCache):
    '''
    A pending snapshot with the wrong name should not be considered made by the addon
    '''
    # Seed the cache with a pending record the addon supposedly started.
    data_cache.snapshot("pending")[KEY_NAME] = "Test Snapshot"
    data_cache.snapshot("pending")[KEY_CREATED] = time.now().isoformat()
    data_cache.snapshot("pending")[KEY_LAST_SEEN] = time.now().isoformat()

    # A real snapshot appears whose name doesn't match the pending record.
    await supervisor.createSnapshot({"name": "Wrong Name"},
                                    date=time.now() - timedelta(hours=12))

    # The mismatched name means it must not be attributed to the addon.
    snapshots = await ha.get()
    assert len(snapshots) == 1
    snapshot = next(iter(snapshots.values()))
    assert not snapshot.madeByTheAddon()
async def test_date_match_wrong_pending_backup(ha: HaSource, time: Time,
                                               config: Config,
                                               supervisor: SimulatedSupervisor,
                                               model: Model, interceptor,
                                               data_cache: DataCache):
    '''
    A pending backup with the same name but the wrong date should not be
    considered made by the addon.
    '''
    now_stamp = time.now().isoformat()
    data_cache.backup("pending")[KEY_NAME] = "Test Backup"
    data_cache.backup("pending")[KEY_CREATED] = now_stamp
    data_cache.backup("pending")[KEY_LAST_SEEN] = now_stamp

    # Same name, but created 25 hours before the pending record — too old.
    await supervisor.createBackup({"name": "Test Backup"},
                                  date=time.now() - timedelta(hours=25))

    results = await ha.get()
    assert len(results) == 1
    backup = next(iter(results.values()))
    assert not backup.madeByTheAddon()
# Example #6
async def test_dirty_cache_gets_saved(time: FakeTime, model: Model,
                                      data_cache: DataCache):
    """Syncing the model should flush a dirty data cache (clearing its dirty flag)."""
    data_cache.makeDirty()
    await model.sync(time.now())
    assert not data_cache.dirty
async def test_version_upgrades(time: Time, injector: Injector,
                                config: Config) -> None:
    """Verify upgrade records are written, persisted, and queryable across cache reloads."""
    # Simulate upgrading from an un-tracked version: no cache file exists yet,
    # so the first DataCache sees the "default" previous version.
    assert not os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    cache = injector.get(DataCache)
    upgrade_time = time.now()
    assert cache.previousVersion == Version.default()
    assert cache.currentVersion == Version.parse(VERSION)
    assert cache.checkFlag(UpgradeFlags.DONT_IGNORE_LEGACY_SNAPSHOTS)

    # Creating the cache should have written an upgrade record to disk.
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }]

    # Reload the data cache, verify there is no upgrade.
    time.advance(days=1)
    cache = DataCache(config, time)
    assert cache.previousVersion == Version.parse(VERSION)
    assert cache.currentVersion == Version.parse(VERSION)
    assert not cache.checkFlag(UpgradeFlags.DONT_IGNORE_LEGACY_SNAPSHOTS)
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))

    # The upgrade history on disk should be unchanged by a same-version reload.
    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }]

    # simulate upgrading to a new version, verify an upgrade gets identified.
    upgrade_version = Version.parse("200")

    # Subclass overrides currentVersion to pretend the addon was updated.
    class UpgradeCache(DataCache):
        def __init__(self):
            super().__init__(config, time)

        @property
        def currentVersion(self):
            return upgrade_version

    cache = UpgradeCache()
    assert cache.previousVersion == Version.parse(VERSION)
    assert cache.currentVersion == upgrade_version
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))

    # A second upgrade record should now be appended on disk.
    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }, {
            "prev_version": VERSION,
            "new_version": str(upgrade_version),
            "date": time.now().isoformat()
        }]

    next_upgrade_time = time.now()
    time.advance(days=1)
    # Verify version upgrade time queries work as expected
    assert cache.getUpgradeTime(Version.parse(VERSION)) == upgrade_time
    assert cache.getUpgradeTime(Version.default()) == upgrade_time
    assert cache.getUpgradeTime(upgrade_version) == next_upgrade_time

    # degenerate case, should never happen but a sensible value needs to be returned
    assert cache.getUpgradeTime(Version.parse("201")) == time.now()
async def test_read_and_write(config: Config, time: Time) -> None:
    """Entries persist to disk only after makeDirty() + saveIfDirty(), and reload cleanly."""
    cache = DataCache(config, time)
    assert len(cache.snapshots) == 0

    stamp = time.now().isoformat()
    cache.snapshot("test")[KEY_CREATED] = stamp
    # Mutating an entry does not, by itself, mark the cache dirty.
    assert not cache._dirty
    cache.makeDirty()
    assert cache._dirty
    cache.saveIfDirty()
    assert not cache._dirty

    # A fresh instance reads the persisted entry back and starts clean.
    reloaded = DataCache(config, time)
    assert reloaded.snapshot("test")[KEY_CREATED] == stamp
    assert not reloaded._dirty
async def test_pending_backup_nowait(ha: HaSource, time: Time,
                                     supervisor: SimulatedSupervisor,
                                     interceptor: RequestInterceptor,
                                     config: Config, data_cache: DataCache):
    """When a backup outlives the timeout, a synthetic "pending" backup is returned
    and later replaced by the real one once the supervisor finishes."""
    # Make the backup call slow (5s) while the addon only waits 0.1s, forcing
    # the pending-backup path.
    interceptor.setSleep(URL_MATCH_BACKUP_FULL, sleep=5)
    config.override(Setting.NEW_BACKUP_TIMEOUT_SECONDS, 0.1)
    backup_immediate: PendingBackup = await ha.create(
        CreateOptions(time.now(), "Test Name"))
    assert isinstance(backup_immediate, PendingBackup)
    backup_pending: HABackup = (await ha.get())['pending']

    # create() and get() must hand back the same placeholder object.
    assert isinstance(backup_immediate, PendingBackup)
    assert isinstance(backup_pending, PendingBackup)
    assert backup_immediate is backup_pending
    assert backup_immediate.name() == "Test Name"
    assert backup_immediate.slug() == "pending"
    assert not backup_immediate.uploadable()
    assert backup_immediate.backupType() == "Full"
    assert backup_immediate.source() == SOURCE_HA
    assert backup_immediate.date() == time.now()
    assert not backup_immediate.protected()
    assert not backup_immediate.ignore()
    assert backup_immediate.madeByTheAddon()
    # The cache tracks the pending backup under the reserved "pending" slug.
    assert data_cache.backup("pending") == {
        KEY_CREATED: time.now().isoformat(),
        KEY_LAST_SEEN: time.now().isoformat(),
        KEY_NAME: "Test Name"
    }

    # Might be a little flaky but...whatever
    await asyncio.wait({ha._pending_backup_task})

    # Once the slow request completes, the placeholder is replaced by the real
    # backup and the "pending" cache entry is removed.
    backups = await ha.get()
    assert 'pending' not in backups
    assert len(backups) == 1
    backup = next(iter(backups.values()))
    assert isinstance(backup, HABackup)
    assert not backup.ignore()
    assert backup.madeByTheAddon()
    assert data_cache.backup(
        backup.slug())[KEY_LAST_SEEN] == time.now().isoformat()
    assert "pending" not in data_cache.backups

    return
    # ignore events for now — everything below is deliberately unreachable
    # (disabled event-firing assertions kept for reference)
    assert supervisor.getEvents() == [(EVENT_BACKUP_START, {
        'backup_name':
        backup_immediate.name(),
        'backup_time':
        str(backup_immediate.date())
    })]
    ha.backup_thread.join()
    assert supervisor.getEvents() == [(EVENT_BACKUP_START, {
        'backup_name':
        backup_immediate.name(),
        'backup_time':
        str(backup_immediate.date())
    }),
                                      (EVENT_BACKUP_END, {
                                          'completed':
                                          True,
                                          'backup_name':
                                          backup_immediate.name(),
                                          'backup_time':
                                          str(backup_immediate.date())
                                      })]
# Example #10
async def test_flag(config: Config, time: Time):
    """Upgrade flags persist across DataCache instances once saved."""
    cache = DataCache(config, time)
    assert not cache.checkFlag(UpgradeFlags.TESTING_FLAG)
    assert not cache.dirty

    # Adding a flag dirties the cache and is visible immediately.
    cache.addFlag(UpgradeFlags.TESTING_FLAG)
    assert cache.dirty
    assert cache.checkFlag(UpgradeFlags.TESTING_FLAG)
    cache.saveIfDirty()

    # The flag must survive a reload from disk.
    reloaded = DataCache(config, time)
    assert reloaded.checkFlag(UpgradeFlags.TESTING_FLAG)