async def test_run_history(hass, recorder_mock):
    """Test the run history gives the correct run."""
    instance = recorder.get_instance(hass)
    now = dt_util.utcnow()
    # Seed three historical recorder runs starting 3, 2 and 1 days ago.
    run_starts = [now - timedelta(days=offset) for offset in (3, 2, 1)]
    with instance.get_session() as session:
        for start in run_starts:
            session.add(RecorderRuns(start=start, created=start))
        session.commit()
        instance.run_history.load_from_db(session)

    # A timestamp just after each run start must resolve to that run.
    for start in run_starts:
        matched = instance.run_history.get(start + timedelta(microseconds=1))
        assert process_timestamp(matched.start) == start

    # "now" falls inside the currently active recording session.
    assert (
        process_timestamp(instance.run_history.get(now).start)
        == instance.run_history.recording_start
    )
def test_statistics_runs_initiated(hass_recorder):
    """Test statistics_runs is initiated when DB is created.

    A freshly-created database must contain exactly one StatisticsRuns
    row, whose start is the beginning of the previous full hour.
    """
    now = dt_util.utcnow()
    # Freeze the recorder's clock so the expected run start is deterministic.
    with patch(
        "homeassistant.components.recorder.dt_util.utcnow", return_value=now
    ):
        hass = hass_recorder()
        wait_recording_done(hass)

        with session_scope(hass=hass) as session:
            statistics_runs = list(session.query(StatisticsRuns))
            assert len(statistics_runs) == 1
            # Fix: the original applied process_timestamp twice
            # (process_timestamp(process_timestamp(...))) — a redundant
            # no-op since the first call already yields an aware UTC
            # datetime; one call is sufficient.
            last_run = process_timestamp(statistics_runs[0].start)
            assert last_run == now.replace(
                minute=0, second=0, microsecond=0
            ) - timedelta(hours=1)
def test_get_significant_states_minimal_response(self):
    """Test that only significant states are returned.

    When minimal responses is set only the first and last states return
    a complete state.

    We should get back every thermostat change that
    includes an attribute change, but only the state updates for
    media player (attribute changes are not significant and not returned).
    """
    zero, four, states = self.record_states()
    hist = history.get_significant_states(
        self.hass,
        zero,
        four,
        filters=history.Filters(),
        minimal_response=True,
    )

    # With minimal_response the second media_player.test state is reduced
    # to just last_changed + state.  Pre-encode last_changed with
    # JSONEncoder so it matches exactly what encoding a native state
    # would produce.
    reduced = states["media_player.test"][1]
    encoded_last_changed = json.dumps(
        process_timestamp(reduced.last_changed),
        cls=JSONEncoder,
    ).replace('"', "")
    states["media_player.test"][1] = {
        "last_changed": encoded_last_changed,
        "state": reduced.state,
    }
    assert states == hist
def time_fired(self):
    """Time event was fired in utc."""
    if not self._time_fired:
        # Lazily convert the DB row's timestamp; fall back to "now"
        # when the row has no usable value.
        fired = process_timestamp(self._row.time_fired)
        self._time_fired = fired or dt_util.utcnow()
    return self._time_fired
def test_compile_missing_statistics(tmpdir):
    """Test missing statistics are compiled on startup."""
    now = dt_util.utcnow().replace(minute=0, second=0, microsecond=0)
    test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"

    def _start_instance():
        # Boot a Home Assistant instance backed by the shared SQLite file
        # and wait for the recorder to flush.
        hass = get_test_home_assistant()
        setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}})
        hass.start()
        wait_recording_done(hass)
        wait_recording_done(hass)
        return hass

    # First boot: a single statistics run is created for the prior period.
    with patch(
        "homeassistant.components.recorder.dt_util.utcnow", return_value=now
    ):
        hass = _start_instance()

        with session_scope(hass=hass) as session:
            statistics_runs = list(session.query(StatisticsRuns))
            assert len(statistics_runs) == 1
            last_run = process_timestamp(statistics_runs[0].start)
            assert last_run == now - timedelta(minutes=5)

        wait_recording_done(hass)
        wait_recording_done(hass)
        hass.stop()

    # Second boot one hour later: the missing runs are backfilled.
    with patch(
        "homeassistant.components.recorder.dt_util.utcnow",
        return_value=now + timedelta(hours=1),
    ):
        hass = _start_instance()

        with session_scope(hass=hass) as session:
            statistics_runs = list(session.query(StatisticsRuns))
            assert len(statistics_runs) == 13  # 12 5-minute runs
            last_run = process_timestamp(statistics_runs[1].start)
            assert last_run == now

        wait_recording_done(hass)
        wait_recording_done(hass)
        hass.stop()
def test_get_significant_states_minimal_response(hass_recorder):
    """Test that only significant states are returned.

    When minimal responses is set only the first and last states return
    a complete state.

    We should get back every thermostat change that
    includes an attribute change, but only the state updates for
    media player (attribute changes are not significant and not returned).
    """
    hass = hass_recorder()
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four, minimal_response=True)
    entites_with_reducable_states = [
        "media_player.test",
        "media_player.test3",
    ]

    # All states for media_player.test state are reduced
    # down to last_changed and state when minimal_response
    # is set except for the first state.
    # We use JSONEncoder to make sure that the
    # pre-encoded last_changed is always the same as what
    # will happen with encoding a native state
    for entity_id in entites_with_reducable_states:
        entity_states = states[entity_id]
        for state_idx in range(1, len(entity_states)):
            input_state = entity_states[state_idx]
            # Fix: the original had an accidental duplicated assignment
            # target (`orig_last_changed = orig_last_changed = ...`).
            orig_last_changed = json.dumps(
                process_timestamp(input_state.last_changed),
                cls=JSONEncoder,
            ).replace('"', "")
            orig_state = input_state.state
            entity_states[state_idx] = {
                "last_changed": orig_last_changed,
                "state": orig_state,
            }
    assert states == hist
async def test_process_timestamp():
    """Test processing time stamp to UTC."""
    # Already-aware UTC input passes through unchanged.
    aware_utc = datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC)
    assert process_timestamp(aware_utc) == datetime(
        2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC
    )

    # Naive input is assumed to already be UTC.
    naive = datetime(2016, 7, 9, 11, 0, 0)
    assert process_timestamp(naive) == datetime(
        2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC
    )

    # Aware inputs in other zones are converted to UTC.
    zone_cases = (
        ("US/Eastern", datetime(2016, 7, 9, 15, 0, tzinfo=dt.UTC)),
        ("Canada/Newfoundland", datetime(2016, 7, 9, 13, 30, tzinfo=dt.UTC)),
        ("US/Hawaii", datetime(2016, 7, 9, 21, 0, tzinfo=dt.UTC)),
    )
    for zone_name, expected in zone_cases:
        zone = dt_util.get_time_zone(zone_name)
        local_dt = datetime(2016, 7, 9, 11, 0, 0, tzinfo=zone)
        assert process_timestamp(local_dt) == expected

    # None passes through as None.
    assert process_timestamp(None) is None
def last_updated(self):
    """Last updated datetime."""
    cached = self._last_updated
    if not cached:
        # Lazily convert the DB row's timestamp on first access.
        cached = process_timestamp(self._row.last_updated)
        self._last_updated = cached
    return cached
async def test_get_full_significant_states_handles_empty_last_changed(
    hass: ha.HomeAssistant,
    async_setup_recorder_instance: SetupRecorderInstanceT,
):
    """Test getting states when last_changed is null.

    Records two updates of the same state value (so last_changed is
    unchanged while last_updated differs), then verifies the history
    API, native conversion, and raw DB rows all agree on the
    last_changed / last_updated semantics.
    """
    await async_setup_recorder_instance(hass, {})
    now = dt_util.utcnow()
    # Set the same state twice with different attributes: last_changed
    # stays constant, last_updated advances.
    hass.states.async_set("sensor.one", "on", {"attr": "original"})
    state0 = hass.states.get("sensor.one")
    await hass.async_block_till_done()
    hass.states.async_set("sensor.one", "on", {"attr": "new"})
    state1 = hass.states.get("sensor.one")

    assert state0.last_changed == state1.last_changed
    assert state0.last_updated != state1.last_updated
    await async_wait_recording_done(hass)

    def _get_entries():
        # Fetch full (non-minimal) significant states from the DB.
        with session_scope(hass=hass) as session:
            return history.get_full_significant_states_with_session(
                hass,
                session,
                now,
                dt_util.utcnow(),
                entity_ids=["sensor.one"],
                significant_changes_only=False,
            )

    states = await recorder.get_instance(hass).async_add_executor_job(
        _get_entries
    )
    sensor_one_states: list[State] = states["sensor.one"]
    # History must round-trip both states and preserve the timestamp
    # relationship recorded above.
    assert sensor_one_states[0] == state0
    assert sensor_one_states[1] == state1
    assert sensor_one_states[0].last_changed == sensor_one_states[
        1].last_changed
    assert sensor_one_states[0].last_updated != sensor_one_states[
        1].last_updated

    def _fetch_native_states() -> list[State]:
        # Rebuild native State objects directly from the States /
        # StateAttributes tables, bypassing the history API.
        with session_scope(hass=hass) as session:
            native_states = []
            db_state_attributes = {
                state_attributes.attributes_id: state_attributes
                for state_attributes in session.query(StateAttributes)
            }
            for db_state in session.query(States):
                state = db_state.to_native()
                state.attributes = db_state_attributes[
                    db_state.attributes_id].to_native()
                native_states.append(state)
            return native_states

    native_sensor_one_states = await recorder.get_instance(
        hass).async_add_executor_job(_fetch_native_states)
    # Native conversion must agree with the history API results.
    assert native_sensor_one_states[0] == state0
    assert native_sensor_one_states[1] == state1
    assert (native_sensor_one_states[0].last_changed ==
            native_sensor_one_states[1].last_changed)
    assert (native_sensor_one_states[0].last_updated !=
            native_sensor_one_states[1].last_updated)

    def _fetch_db_states() -> list[State]:
        # Pull the raw ORM rows; expunge so they stay usable after the
        # session closes.
        with session_scope(hass=hass) as session:
            states = list(session.query(States))
            session.expunge_all()
            return states

    db_sensor_one_states = await recorder.get_instance(
        hass).async_add_executor_job(_fetch_db_states)
    # At the storage layer, an unchanged state is stored with a NULL
    # last_changed on the second row's predecessor semantics: row 0 has
    # NULL last_changed, row 1 carries the original last_changed.
    assert db_sensor_one_states[0].last_changed is None
    assert (process_timestamp(
        db_sensor_one_states[1].last_changed) == state0.last_changed)
    assert db_sensor_one_states[0].last_updated is not None
    assert db_sensor_one_states[1].last_updated is not None
    assert db_sensor_one_states[0].last_updated != db_sensor_one_states[
        1].last_updated