async def test_snapshot_expiration(config: Config, time: Time) -> None:
    cache = DataCache(config, time)
    assert len(cache.snapshots) == 0

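    # Record one entry seen just now and one last seen beyond the expiration window.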
    cache.snapshot("new")[KEY_LAST_SEEN] = time.now().isoformat()
    cache.snapshot("old")[KEY_LAST_SEEN] = (
        time.now() - timedelta(days=CACHE_EXPIRATION_DAYS + 1)).isoformat()
    cache.makeDirty()
    cache.saveIfDirty()

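    # Saving the cache should prune the entry whose last-seen time has expired.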
    assert len(cache.snapshots) == 1
    assert "new" in cache.snapshots
    assert "old" not in cache.snapshots
async def test_bump_last_seen(ha: HaSource, time: Time, config: Config,
                              supervisor: SimulatedSupervisor, model: Model,
                              interceptor, data_cache: DataCache):
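    # Each query of the source should refresh the backup's last-seen timestamp in the cache.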
    backup = await ha.create(CreateOptions(time.now(), "Test Name"))
    time.advance(days=1)
    assert backup.slug() in await ha.get()
    assert data_cache.backup(
        backup.slug())[KEY_LAST_SEEN] == time.now().isoformat()

    time.advance(days=1)
    assert backup.slug() in await ha.get()
    assert data_cache.backup(
        backup.slug())[KEY_LAST_SEEN] == time.now().isoformat()

async def test_read_and_write(config: Config, time: Time) -> None:
    cache = DataCache(config, time)
    assert len(cache.snapshots) == 0

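    # Writes only reach disk once the cache is explicitly marked dirty and saved.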
    cache.snapshot("test")[KEY_CREATED] = time.now().isoformat()
    assert not cache._dirty
    cache.makeDirty()
    assert cache._dirty
    cache.saveIfDirty()
    assert not cache._dirty

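    # A fresh cache instance should read the persisted value back from disk.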
    cache = DataCache(config, time)
    assert cache.snapshot("test")[KEY_CREATED] == time.now().isoformat()
    assert not cache._dirty
async def test_name_wrong_match_pending_snapshot(
        ha: HaSource, time: Time, config: Config,
        supervisor: SimulatedSupervisor, model: Model, interceptor,
        data_cache: DataCache):
    '''
    A pending snapshot with the wrong name should not be considered made by the addon.
    '''
    data_cache.snapshot("pending")[KEY_NAME] = "Test Snapshot"
    data_cache.snapshot("pending")[KEY_CREATED] = time.now().isoformat()
    data_cache.snapshot("pending")[KEY_LAST_SEEN] = time.now().isoformat()

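    # Create a snapshot whose name does not match the pending cache entry.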
    await supervisor.createSnapshot({"name": "Wrong Name"},
                                    date=time.now() - timedelta(hours=12))

    snapshots = await ha.get()
    assert len(snapshots) == 1
    snapshot = next(iter(snapshots.values()))
    assert not snapshot.madeByTheAddon()
async def test_date_match_wrong_pending_backup(ha: HaSource, time: Time,
                                               config: Config,
                                               supervisor: SimulatedSupervisor,
                                               model: Model, interceptor,
                                               data_cache: DataCache):
    '''
    A pending backup with the same name but the wrong date should not be considered made by the addon.
    '''
    data_cache.backup("pending")[KEY_NAME] = "Test Backup"
    data_cache.backup("pending")[KEY_CREATED] = time.now().isoformat()
    data_cache.backup("pending")[KEY_LAST_SEEN] = time.now().isoformat()

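    # Create a backup with a matching name but a creation time too far from the pending entry's.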
    await supervisor.createBackup({"name": "Test Backup"},
                                  date=time.now() - timedelta(hours=25))

    backups = await ha.get()
    assert len(backups) == 1
    backup = next(iter(backups.values()))
    assert not backup.madeByTheAddon()

    def __init__(self, config: Config, requests: HaRequests, time: Time):
        super().__init__("StartandStopTimer", self.check, time, 10)
        self.requests = requests
        self.config = config
        self.time = time
        self.must_start = set()
        self.must_enable_watchdog = set()
        self.stop_start_check_time = time.now()
        self._backing_up = False
        self.allow_run = False
        self.lock = Lock()

async def test_ignored_snapshots(updater: HaUpdater, time: Time,
                                 server: SimulationServer, snapshot,
                                 supervisor: SimulatedSupervisor,
                                 coord: Coordinator, config: Config):
    config.override(Setting.IGNORE_OTHER_SNAPSHOTS, True)
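    # Create a snapshot outside the addon and verify the counts reported to the sensor.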
    await supervisor.createSnapshot({'name': "test_snapshot"}, date=time.now())
    await coord.sync()
    await updater.update()
    state = supervisor.getAttributes("sensor.snapshot_backup")
    assert state["snapshots_in_google_drive"] == 1
    assert state["snapshots_in_home_assistant"] == 1
    assert len(state["snapshots"]) == 2
    def getDriveCreds(self, time: Time) -> Creds:
        # Return stubbed credentials for use in tests.
        return Creds(time, "test_client_id", time.now(), "test_access_token", "test_refresh_token", "test_client_secret")

async def test_version_upgrades(time: Time, injector: Injector,
                                config: Config) -> None:
    # Simulate upgrading from an untracked version
    assert not os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    cache = injector.get(DataCache)
    upgrade_time = time.now()
    assert cache.previousVersion == Version.default()
    assert cache.currentVersion == Version.parse(VERSION)
    assert cache.checkFlag(UpgradeFlags.DONT_IGNORE_LEGACY_SNAPSHOTS)

    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }]

    # Reload the data cache, verify there is no upgrade.
    time.advance(days=1)
    cache = DataCache(config, time)
    assert cache.previousVersion == Version.parse(VERSION)
    assert cache.currentVersion == Version.parse(VERSION)
    assert not cache.checkFlag(UpgradeFlags.DONT_IGNORE_LEGACY_SNAPSHOTS)
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))

    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }]

    # simulate upgrading to a new version, verify an upgrade gets identified.
    upgrade_version = Version.parse("200")

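    # Subclass that reports a newer version so the next load is seen as an upgrade.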
    class UpgradeCache(DataCache):
        def __init__(self):
            super().__init__(config, time)

        @property
        def currentVersion(self):
            return upgrade_version

    cache = UpgradeCache()
    assert cache.previousVersion == Version.parse(VERSION)
    assert cache.currentVersion == upgrade_version
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))

    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }, {
            "prev_version": VERSION,
            "new_version": str(upgrade_version),
            "date": time.now().isoformat()
        }]

    next_upgrade_time = time.now()
    time.advance(days=1)
    # Verify version upgrade time queries work as expected
    assert cache.getUpgradeTime(Version.parse(VERSION)) == upgrade_time
    assert cache.getUpgradeTime(Version.default()) == upgrade_time
    assert cache.getUpgradeTime(upgrade_version) == next_upgrade_time

    # degenerate case, should never happen but a sensible value needs to be returned
    assert cache.getUpgradeTime(Version.parse("201")) == time.now()
async def test_pending_backup_nowait(ha: HaSource, time: Time,
                                     supervisor: SimulatedSupervisor,
                                     interceptor: RequestInterceptor,
                                     config: Config, data_cache: DataCache):
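    # Delay the backup endpoint past the timeout so creation returns a pending placeholder.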
    interceptor.setSleep(URL_MATCH_BACKUP_FULL, sleep=5)
    config.override(Setting.NEW_BACKUP_TIMEOUT_SECONDS, 0.1)
    backup_immediate: PendingBackup = await ha.create(
        CreateOptions(time.now(), "Test Name"))
    assert isinstance(backup_immediate, PendingBackup)
    backup_pending: HABackup = (await ha.get())['pending']

    assert isinstance(backup_immediate, PendingBackup)
    assert isinstance(backup_pending, PendingBackup)
    assert backup_immediate is backup_pending
    assert backup_immediate.name() == "Test Name"
    assert backup_immediate.slug() == "pending"
    assert not backup_immediate.uploadable()
    assert backup_immediate.backupType() == "Full"
    assert backup_immediate.source() == SOURCE_HA
    assert backup_immediate.date() == time.now()
    assert not backup_immediate.protected()
    assert not backup_immediate.ignore()
    assert backup_immediate.madeByTheAddon()
    assert data_cache.backup("pending") == {
        KEY_CREATED: time.now().isoformat(),
        KEY_LAST_SEEN: time.now().isoformat(),
        KEY_NAME: "Test Name"
    }

    # Might be a little flaky but...whatever
    await asyncio.wait({ha._pending_backup_task})

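    # Once the background task finishes, the pending placeholder should be replaced by the real backup.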
    backups = await ha.get()
    assert 'pending' not in backups
    assert len(backups) == 1
    backup = next(iter(backups.values()))
    assert isinstance(backup, HABackup)
    assert not backup.ignore()
    assert backup.madeByTheAddon()
    assert data_cache.backup(
        backup.slug())[KEY_LAST_SEEN] == time.now().isoformat()
    assert "pending" not in data_cache.backups

    # Ignore the supervisor events below for now.
    return
    assert supervisor.getEvents() == [(EVENT_BACKUP_START, {
        'backup_name': backup_immediate.name(),
        'backup_time': str(backup_immediate.date())
    })]
    ha.backup_thread.join()
    assert supervisor.getEvents() == [
        (EVENT_BACKUP_START, {
            'backup_name': backup_immediate.name(),
            'backup_time': str(backup_immediate.date())
        }),
        (EVENT_BACKUP_END, {
            'completed': True,
            'backup_name': backup_immediate.name(),
            'backup_time': str(backup_immediate.date())
        })
    ]