async def test_snapshot_expiration(config: Config, time: Time) -> None:
    cache = DataCache(config, time)
    assert len(cache.snapshots) == 0

    cache.snapshot("new")[KEY_LAST_SEEN] = time.now().isoformat()
    cache.snapshot("old")[KEY_LAST_SEEN] = (
        time.now() - timedelta(days=CACHE_EXPIRATION_DAYS + 1)).isoformat()
    cache.makeDirty()
    cache.saveIfDirty()

    assert len(cache.snapshots) == 1
    assert "new" in cache.snapshots
    assert "old" not in cache.snapshots
def test_trivial_oldest(time: Time) -> None:
    config = GenConfig(days=1)
    scheme = GenerationalScheme(time, config, count=0)

    backups = [
        makeBackup("test", time.local(1985, 12, 6, 10)),
        makeBackup("test", time.local(1985, 12, 6, 12)),
        makeBackup("test", time.local(1985, 12, 6, 13))
    ]
    assertRemovalOrder(scheme, backups, [
        time.local(1985, 12, 6, 10),
        time.local(1985, 12, 6, 12),
        time.local(1985, 12, 6, 13)
    ])
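assertRemovalOrder itself is not included in this listing. A minimal sketch consistent with its use here and in test_removal_order_week below, assuming the scheme exposes a getOldest-style accessor (hypothetical name):

def assertRemovalOrder(scheme, backups, expected_dates):
    # Repeatedly ask the scheme which backup it would purge next and
    # verify the dates come back in the expected order.
    remaining = list(backups)
    for expected in expected_dates:
        oldest = scheme.getOldest(remaining)  # hypothetical accessor
        assert oldest.date() == expected
        remaining.remove(oldest)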
async def test_read_and_write(config: Config, time: Time) -> None:
    cache = DataCache(config, time)
    assert len(cache.snapshots) == 0

    cache.snapshot("test")[KEY_CREATED] = time.now().isoformat()
    assert not cache._dirty
    cache.makeDirty()
    assert cache._dirty
    cache.saveIfDirty()
    assert not cache._dirty

    cache = DataCache(config, time)
    assert cache.snapshot("test")[KEY_CREATED] == time.now().isoformat()
    assert not cache._dirty
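The dirty-flag round trip this test exercises (mutate, mark dirty, save, reload) is a common persistence pattern. A self-contained sketch of the idea, with the file layout as an assumption rather than the add-on's actual format:

import json


class TinyCache:
    # Minimal sketch of the dirty-flag persistence pattern; not the
    # add-on's DataCache.
    def __init__(self, path: str):
        self._path = path
        self._dirty = False
        try:
            with open(path) as f:
                self._data = json.load(f)
        except FileNotFoundError:
            self._data = {}

    def makeDirty(self) -> None:
        self._dirty = True

    def saveIfDirty(self) -> None:
        if self._dirty:
            with open(self._path, "w") as f:
                json.dump(self._data, f)
            self._dirty = False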
Example 4
async def test_name_wrong_match_pending_snapshot(
        ha: HaSource, time: Time, config: Config,
        supervisor: SimulatedSupervisor, model: Model, interceptor,
        data_cache: DataCache):
    '''
    A pending snapshot with the wrong name should not be considered made by the addon.
    '''
    data_cache.snapshot("pending")[KEY_NAME] = "Test Snapshot"
    data_cache.snapshot("pending")[KEY_CREATED] = time.now().isoformat()
    data_cache.snapshot("pending")[KEY_LAST_SEEN] = time.now().isoformat()

    await supervisor.createSnapshot({"name": "Wrong Name"},
                                    date=time.now() - timedelta(hours=12))

    snapshots = await ha.get()
    assert len(snapshots) == 1
    snapshot = next(iter(snapshots.values()))
    assert not snapshot.madeByTheAddon()
async def test_date_match_wrong_pending_backup(ha: HaSource, time: Time,
                                               config: Config,
                                               supervisor: SimulatedSupervisor,
                                               model: Model, interceptor,
                                               data_cache: DataCache):
    '''
    A pending backup with the same name but the wrong date should not be considered made by the addon.
    '''
    data_cache.backup("pending")[KEY_NAME] = "Test Backup"
    data_cache.backup("pending")[KEY_CREATED] = time.now().isoformat()
    data_cache.backup("pending")[KEY_LAST_SEEN] = time.now().isoformat()

    await supervisor.createBackup({"name": "Test Backup"},
                                  date=time.now() - timedelta(hours=25))

    backups = await ha.get()
    assert len(backups) == 1
    backup = next(iter(backups.values()))
    assert not backup.madeByTheAddon()
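Taken together, the two tests above pin down the matching rule: a backup is attributed to the addon only when both its name and its creation time line up with the pending cache entry. A hypothetical distillation; the 24-hour window is inferred from the 12-hour pass and 25-hour fail above, not confirmed by the source:

from datetime import datetime, timedelta


def matches_pending(pending: dict, name: str, created: datetime) -> bool:
    # Both conditions must hold for madeByTheAddon() to be true.
    if pending["name"] != name:
        return False
    recorded = datetime.fromisoformat(pending["created"])
    return abs(created - recorded) <= timedelta(hours=24)  # inferred window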
def test_parse_timezone(time) -> None:
    assertUtc(Time.parse("1985-12-06"))
    assertUtc(Time.parse("1985-12-06 21:21"))
    assertUtc(Time.parse("1985-12-06 21:21+00:00"))
    assertUtc(Time.parse("1985-12-06 21:21 UTC"))
    assertUtc(Time.parse("1985-12-06 21:21 GGGR"))

    assertOffset(Time.parse("1985-12-06 21:21+10"), 10)
    assertOffset(Time.parse("1985-12-06 21:21-10"), -10)
def __init__(self, config: Config, requests: HaRequests, time: Time):
    super().__init__("StartandStopTimer", self.check, time, 10)
    self.requests = requests
    self.config = config
    self.time = time
    self.must_start = set()
    self.must_enable_watchdog = set()
    self.stop_start_check_time = time.now()
    self._backing_up = False
    self.allow_run = False
    self.lock = Lock()
async def test_ignored_snapshots(updater: HaUpdater, time: Time,
                                 server: SimulationServer, snapshot,
                                 supervisor: SimulatedSupervisor,
                                 coord: Coordinator, config: Config):
    config.override(Setting.IGNORE_OTHER_SNAPSHOTS, True)
    await supervisor.createSnapshot({'name': "test_snapshot"}, date=time.now())
    await coord.sync()
    await updater.update()
    state = supervisor.getAttributes("sensor.snapshot_backup")
    assert state["snapshots_in_google_drive"] == 1
    assert state["snapshots_in_home_assistant"] == 1
    assert len(state["snapshots"]) == 2
def test_removal_order_week(time: Time):
    config = GenConfig(weeks=1, day_of_week='wed', aggressive=True)
    scheme = GenerationalScheme(time, config, count=1)
    backups = [
        makeBackup("test", time.local(2019, 10, 28)),
        makeBackup("test", time.local(2019, 10, 29)),
        makeBackup("test", time.local(2019, 10, 30, 1)),
        makeBackup("test", time.local(2019, 10, 30, 2)),
        makeBackup("test", time.local(2019, 10, 31)),
        makeBackup("test", time.local(2019, 11, 1)),
        makeBackup("test", time.local(2019, 11, 2)),
        makeBackup("test", time.local(2019, 11, 3)),
    ]
    assertRemovalOrder(scheme, backups, [
        time.local(2019, 10, 28),
        time.local(2019, 10, 29),
        time.local(2019, 10, 30, 1),
        time.local(2019, 10, 31),
        time.local(2019, 11, 1),
        time.local(2019, 11, 2),
        time.local(2019, 11, 3)
    ])
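The expected order encodes the weekly rule: within the week, the backup kept is the latest one made on the configured day_of_week (Wednesday here), and everything else is removable oldest-first. A hypothetical distillation that reproduces the expected list above:

def removable_oldest_first(backups, keep_weekday: int):
    # keep_weekday uses datetime.weekday() numbering (2 == Wednesday).
    dated = sorted(backups, key=lambda b: b.date())
    preferred = [b for b in dated if b.date().weekday() == keep_weekday]
    keep = preferred[-1] if preferred else dated[-1]
    return [b for b in dated if b is not keep]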
Example 10
def main() -> None:
    logging.getLogger('googleapiclient.discovery_cache').setLevel(
        logging.ERROR)

    time: Time = Time()

    if len(sys.argv) == 1:
        config: Config = Config([HASSIO_OPTIONS_FILE])
    else:
        config: Config = Config(sys.argv[1:])

    hassio: Hassio = Hassio(config)
    while True:
        try:
            hassio.loadInfo()
            break
        except Exception:
            LogBase().critical(
                "Unable to reach Hassio supervisor.  Please ensure the supervisor is running."
            )
            sleep(10)

    if config.warnIngress():
        LogBase().warn(
            "This add-on supports ingress but your version of Home Assistant does not. Please update to the latest version of Home Assistant."
        )

    drive: Drive = Drive(config)
    watcher = None  # ensure 'watcher' is defined for the finally block below
    try:
        watcher = Watcher(time, config)
        engine: Engine = Engine(watcher, config, drive, hassio,
                                time)  # type: ignore
        server: Server = Server("www", engine, config)

        engine_thread: Thread = Thread(target=engine.run)  # type: ignore
        engine_thread.setName("Engine Thread")
        engine_thread.daemon = True
        engine_thread.start()

        server_thread: Thread = Thread(target=server.run)
        server_thread.daemon = True
        server_thread.setName("Server Thread")
        server_thread.start()

        while True:
            sleep(5)
    finally:
        if watcher:
            watcher.stop()
async def test_bump_last_seen(ha: HaSource, time: Time, config: Config,
                              supervisor: SimulatedSupervisor, model: Model,
                              interceptor, data_cache: DataCache):
    backup = await ha.create(CreateOptions(time.now(), "Test Name"))
    time.advance(days=1)
    assert backup.slug() in await ha.get()
    assert data_cache.backup(
        backup.slug())[KEY_LAST_SEEN] == time.now().isoformat()

    time.advance(days=1)
    assert backup.slug() in await ha.get()
    assert data_cache.backup(
        backup.slug())[KEY_LAST_SEEN] == time.now().isoformat()
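This test implies that every ha.get() refreshes KEY_LAST_SEEN for each backup it still sees, which is what lets test_snapshot_expiration at the top of this listing prune entries that stop appearing. A sketch of that bump, with hypothetical names:

KEY_LAST_SEEN = "last_seen"  # assumed key value


def bump_last_seen(data_cache, slugs, now):
    # Hypothetical sketch of the refresh step ha.get() appears to
    # perform for every backup it returns.
    for slug in slugs:
        data_cache.backup(slug)[KEY_LAST_SEEN] = now.isoformat()
    data_cache.makeDirty()
    data_cache.saveIfDirty()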
Example 12
def __init__(self,
             data: Dict[str, Any],
             data_cache: DataCache,
             config: Config,
             retained=False):
    super().__init__(name=ensureKey('name', data, HA_KEY_TEXT),
                     slug=ensureKey('slug', data, HA_KEY_TEXT),
                     date=Time.parse(ensureKey('date', data, HA_KEY_TEXT)),
                     size=float(ensureKey("size", data, HA_KEY_TEXT)) * 1024 * 1024,
                     source=SOURCE_HA,
                     snapshotType=ensureKey('type', data, HA_KEY_TEXT),
                     version=ensureKey('homeassistant', data, HA_KEY_TEXT),
                     protected=ensureKey('protected', data, HA_KEY_TEXT),
                     retained=retained,
                     uploadable=True,
                     details=data)
    self._data_cache = data_cache
    self._config = config
def test_parse() -> None:
    time = Time.parse("1985-12-06 01:01:01.0001")
    assert str(time) == "1985-12-06 01:01:01.000100+00:00"

    time = Time.parse("1985-12-06 01:01:01.0001+01:00")
    assert str(time) == "1985-12-06 01:01:01.000100+01:00"
Example 14
def getDriveCreds(self, time: Time) -> Creds:
    return Creds(time, "test_client_id", time.now(), "test_access_token",
                 "test_refresh_token", "test_client_secret")
async def test_version_upgrades(time: Time, injector: Injector,
                                config: Config) -> None:
    # Simulate upgrading from an untracked version
    assert not os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    cache = injector.get(DataCache)
    upgrade_time = time.now()
    assert cache.previousVersion == Version.default()
    assert cache.currentVersion == Version.parse(VERSION)
    assert cache.checkFlag(UpgradeFlags.DONT_IGNORE_LEGACY_SNAPSHOTS)

    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }]

    # Reload the data cache, verify there is no upgrade.
    time.advance(days=1)
    cache = DataCache(config, time)
    assert cache.previousVersion == Version.parse(VERSION)
    assert cache.currentVersion == Version.parse(VERSION)
    assert not cache.checkFlag(UpgradeFlags.DONT_IGNORE_LEGACY_SNAPSHOTS)
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))

    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }]

    # Simulate upgrading to a new version and verify the upgrade gets identified.
    upgrade_version = Version.parse("200")

    class UpgradeCache(DataCache):
        def __init__(self):
            super().__init__(config, time)

        @property
        def currentVersion(self):
            return upgrade_version

    cache = UpgradeCache()
    assert cache.previousVersion == Version.parse(VERSION)
    assert cache.currentVersion == upgrade_version
    assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))

    with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
        data = json.load(f)
        assert data["upgrades"] == [{
            "prev_version": str(Version.default()),
            "new_version": VERSION,
            "date": upgrade_time.isoformat()
        }, {
            "prev_version": VERSION,
            "new_version": str(upgrade_version),
            "date": time.now().isoformat()
        }]

    next_upgrade_time = time.now()
    time.advance(days=1)
    # Verify version upgrade time queries work as expected
    assert cache.getUpgradeTime(Version.parse(VERSION)) == upgrade_time
    assert cache.getUpgradeTime(Version.default()) == upgrade_time
    assert cache.getUpgradeTime(upgrade_version) == next_upgrade_time

    # Degenerate case: should never happen, but a sensible value still needs to be returned.
    assert cache.getUpgradeTime(Version.parse("201")) == time.now()
async def test_pending_backup_nowait(ha: HaSource, time: Time,
                                     supervisor: SimulatedSupervisor,
                                     interceptor: RequestInterceptor,
                                     config: Config, data_cache: DataCache):
    interceptor.setSleep(URL_MATCH_BACKUP_FULL, sleep=5)
    config.override(Setting.NEW_BACKUP_TIMEOUT_SECONDS, 0.1)
    backup_immediate: PendingBackup = await ha.create(
        CreateOptions(time.now(), "Test Name"))
    assert isinstance(backup_immediate, PendingBackup)
    backup_pending: HABackup = (await ha.get())['pending']

    assert isinstance(backup_pending, PendingBackup)
    assert backup_immediate is backup_pending
    assert backup_immediate.name() == "Test Name"
    assert backup_immediate.slug() == "pending"
    assert not backup_immediate.uploadable()
    assert backup_immediate.backupType() == "Full"
    assert backup_immediate.source() == SOURCE_HA
    assert backup_immediate.date() == time.now()
    assert not backup_immediate.protected()
    assert not backup_immediate.ignore()
    assert backup_immediate.madeByTheAddon()
    assert data_cache.backup("pending") == {
        KEY_CREATED: time.now().isoformat(),
        KEY_LAST_SEEN: time.now().isoformat(),
        KEY_NAME: "Test Name"
    }

    # Possibly flaky: wait for the background backup task to finish.
    await asyncio.wait({ha._pending_backup_task})

    backups = await ha.get()
    assert 'pending' not in backups
    assert len(backups) == 1
    backup = next(iter(backups.values()))
    assert isinstance(backup, HABackup)
    assert not backup.ignore()
    assert backup.madeByTheAddon()
    assert data_cache.backup(
        backup.slug())[KEY_LAST_SEEN] == time.now().isoformat()
    assert "pending" not in data_cache.backups

    return
    # Event assertions below are intentionally disabled for now.
    assert supervisor.getEvents() == [(EVENT_BACKUP_START, {
        'backup_name': backup_immediate.name(),
        'backup_time': str(backup_immediate.date())
    })]
    ha.backup_thread.join()
    assert supervisor.getEvents() == [(EVENT_BACKUP_START, {
        'backup_name': backup_immediate.name(),
        'backup_time': str(backup_immediate.date())
    }), (EVENT_BACKUP_END, {
        'completed': True,
        'backup_name': backup_immediate.name(),
        'backup_time': str(backup_immediate.date())
    })]