def test_delete_metadata_duplicates_no_duplicates(hass_recorder, caplog):
    """Test removal of duplicated statistics_meta rows when there are none."""
    hass = hass_recorder()
    wait_recording_done(hass)
    with session_scope(hass=hass) as session:
        delete_statistics_meta_duplicates(session)
    assert "duplicated statistics_meta rows" not in caplog.text
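# NOTE: The test excerpts in this section assume imports along the following
# lines (they would normally sit at the top of the test module). Module paths
# follow the Home Assistant test-suite layout and shift between versions, so
# treat this as a sketch. Helpers such as record_states(),
# record_states_partially_unavailable(), record_energy_states(),
# TEMPERATURE_SENSOR_ATTRIBUTES, ORIG_TZ and _create_engine_test are defined
# elsewhere in the relevant test modules and are not reproduced here.
from datetime import datetime, timedelta
import importlib
import sys
from unittest.mock import patch

import pytest
from pytest import approx
from sqlalchemy.engine.result import ChunkedIteratorResult
from sqlalchemy.exc import SQLAlchemyError

from homeassistant.components import recorder
from homeassistant.components.demo import DOMAIN
from homeassistant.components.recorder import history, statistics, util
from homeassistant.components.recorder.const import DATA_INSTANCE, SQLITE_URL_PREFIX
from homeassistant.components.recorder.models import (
    StatisticsShortTerm,
    process_timestamp_to_utc_isoformat,
)
from homeassistant.components.recorder.statistics import (
    async_add_external_statistics,
    delete_duplicates,
    delete_statistics_meta_duplicates,
    get_last_short_term_statistics,
    get_last_statistics,
    get_latest_short_term_statistics,
    get_metadata,
    list_statistic_ids,
    statistics_during_period,
)
from homeassistant.components.recorder.util import session_scope
from homeassistant.components.statistics.sensor import StatisticsSensor
from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, TEMP_CELSIUS
from homeassistant.exceptions import HomeAssistantError
from homeassistant.setup import setup_component
import homeassistant.core as ha
import homeassistant.util.dt as dt_util

from tests.common import (
    get_test_home_assistant,
    init_recorder_component,
    mock_registry,
    mock_state_change_event,
)
from tests.components.recorder.common import wait_recording_done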
def _setup_get_states(hass):
    """Set up for testing get_states."""
    states = []
    now = dt_util.utcnow()
    with patch(
        "homeassistant.components.recorder.dt_util.utcnow", return_value=now
    ):
        for i in range(5):
            state = ha.State(
                f"test.point_in_time_{i % 5}",
                f"State {i}",
                {"attribute_test": i},
            )
            mock_state_change_event(hass, state)
            states.append(state)
        wait_recording_done(hass)

    future = now + timedelta(seconds=1)
    with patch(
        "homeassistant.components.recorder.dt_util.utcnow", return_value=future
    ):
        for i in range(5):
            state = ha.State(
                f"test.point_in_time_{i % 5}",
                f"State {i}",
                {"attribute_test": i},
            )
            mock_state_change_event(hass, state)
        wait_recording_done(hass)

    return now, future, states
def test_compile_hourly_statistics_unchanged(
    hass_recorder, caplog, device_class, unit, value
):
    """Test compiling hourly statistics, with no changes during the hour."""
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    attributes = {
        "device_class": device_class,
        "state_class": "measurement",
        "unit_of_measurement": unit,
    }
    four, states = record_states(hass, zero, "sensor.test1", attributes)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=four)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, four)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(four),
                "mean": approx(value),
                "min": approx(value),
                "max": approx(value),
                "last_reset": None,
                "state": None,
                "sum": None,
            }
        ]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
def test_compile_hourly_statistics(hass_recorder):
    """Test compiling hourly statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "mean": 14.915254237288135,
                "min": 10.0,
                "max": 20.0,
                "last_reset": None,
                "state": None,
                "sum": None,
            }
        ]
    }
def test_compile_hourly_statistics_partially_unavailable(hass_recorder, caplog):
    """Test compiling hourly statistics, with the sensor being partially unavailable."""
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    four, states = record_states_partially_unavailable(
        hass, zero, "sensor.test1", TEMPERATURE_SENSOR_ATTRIBUTES
    )
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "mean": approx(21.1864406779661),
                "min": approx(10.0),
                "max": approx(25.0),
                "last_reset": None,
                "state": None,
                "sum": None,
            }
        ]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
def test_initialize_from_database(self):
    """Test initializing the statistics from the database."""
    # enable the recorder
    init_recorder_component(self.hass)
    self.hass.block_till_done()
    self.hass.data[recorder.DATA_INSTANCE].block_till_done()
    # store some values
    for value in self.values:
        self.hass.states.set(
            "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
        )
        self.hass.block_till_done()
    # wait for the recorder to really store the data
    wait_recording_done(self.hass)
    # only now create the statistics component, so that it must read the
    # data from the database
    assert setup_component(
        self.hass,
        "sensor",
        {
            "sensor": {
                "platform": "statistics",
                "name": "test",
                "entity_id": "sensor.test_monitored",
                "sampling_size": 100,
            }
        },
    )
    self.hass.block_till_done()
    self.hass.start()
    self.hass.block_till_done()
    # check if the result is as in test_sensor_source()
    state = self.hass.states.get("sensor.test")
    assert str(self.mean) == state.state
def test_state_changes_during_period_multiple_entities_single_test(hass_recorder):
    """Test state change during period with multiple entities in the same test.

    This test ensures the sqlalchemy query cache does not
    generate incorrect results.
    """
    hass = hass_recorder()
    start = dt_util.utcnow()
    test_entities = {f"sensor.{i}": str(i) for i in range(30)}
    for entity_id, value in test_entities.items():
        hass.states.set(entity_id, value)

    wait_recording_done(hass)
    end = dt_util.utcnow()

    hist = history.state_changes_during_period(hass, start, end, None)
    for entity_id, value in test_entities.items():
        assert hist[entity_id][0].state == value

    for entity_id, value in test_entities.items():
        hist = history.state_changes_during_period(hass, start, end, entity_id)
        assert len(hist) == 1
        assert hist[entity_id][0].state == value

    hist = history.state_changes_during_period(hass, start, end, None)
    for entity_id, value in test_entities.items():
        assert hist[entity_id][0].state == value
def test_delete_duplicates_no_duplicates(hass_recorder, caplog):
    """Test removal of duplicated statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    with session_scope(hass=hass) as session:
        delete_duplicates(hass.data[DATA_INSTANCE], session)
    assert "duplicated statistics rows" not in caplog.text
    assert "Found non identical" not in caplog.text
    assert "Found duplicated" not in caplog.text
def test_rename_entity(hass_recorder):
    """Test statistics are migrated when entity_id is changed."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})

    entity_reg = mock_registry(hass)
    reg_entry = entity_reg.async_get_or_create(
        "sensor",
        "test",
        "unique_0000",
        suggested_object_id="test1",
    )
    assert reg_entry.entity_id == "sensor.test1"

    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
        stats = statistics_during_period(hass, zero, period="5minute", **kwargs)
        assert stats == {}
    stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
    assert stats == {}

    recorder.do_adhoc_statistics(start=zero)
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(zero),
        "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
        "mean": approx(14.915254237288135),
        "min": approx(10.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {**expected_1, "statistic_id": "sensor.test1"},
    ]
    expected_stats2 = [
        {**expected_1, "statistic_id": "sensor.test2"},
    ]
    expected_stats99 = [
        {**expected_1, "statistic_id": "sensor.test99"},
    ]

    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}

    entity_reg.async_update_entity(reg_entry.entity_id, new_entity_id="sensor.test99")
    hass.block_till_done()

    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {"sensor.test99": expected_stats99, "sensor.test2": expected_stats2}
def test_duplicate_statistics_handle_integrity_error(hass_recorder, caplog):
    """Test the recorder does not blow up if statistics rows are duplicated."""
    hass = hass_recorder()
    wait_recording_done(hass)

    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))

    external_energy_metadata_1 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_1",
        "unit_of_measurement": "kWh",
    }
    external_energy_statistics_1 = [
        {
            "start": period1,
            "last_reset": None,
            "state": 3,
            "sum": 5,
        },
    ]
    external_energy_statistics_2 = [
        {
            "start": period2,
            "last_reset": None,
            "state": 3,
            "sum": 6,
        }
    ]

    with patch.object(
        statistics, "_statistics_exists", return_value=False
    ), patch.object(
        statistics, "_insert_statistics", wraps=statistics._insert_statistics
    ) as insert_statistics_mock:
        async_add_external_statistics(
            hass, external_energy_metadata_1, external_energy_statistics_1
        )
        async_add_external_statistics(
            hass, external_energy_metadata_1, external_energy_statistics_1
        )
        async_add_external_statistics(
            hass, external_energy_metadata_1, external_energy_statistics_2
        )
        wait_recording_done(hass)
        assert insert_statistics_mock.call_count == 3

    with session_scope(hass=hass) as session:
        tmp = session.query(recorder.models.Statistics).all()
        assert len(tmp) == 2

    assert "Blocked attempt to insert duplicated statistic rows" in caplog.text
def test_get_states(self):
    """Test getting states at a specific point in time."""
    self.test_setup()
    states = []
    now = dt_util.utcnow()
    with patch(
        "homeassistant.components.recorder.dt_util.utcnow", return_value=now
    ):
        for i in range(5):
            state = ha.State(
                f"test.point_in_time_{i % 5}",
                f"State {i}",
                {"attribute_test": i},
            )
            mock_state_change_event(self.hass, state)
            states.append(state)
        wait_recording_done(self.hass)

    future = now + timedelta(seconds=1)
    with patch(
        "homeassistant.components.recorder.dt_util.utcnow", return_value=future
    ):
        for i in range(5):
            state = ha.State(
                f"test.point_in_time_{i % 5}",
                f"State {i}",
                {"attribute_test": i},
            )
            mock_state_change_event(self.hass, state)
        wait_recording_done(self.hass)

    # Get states returns everything before POINT
    for state1, state2 in zip(
        states,
        sorted(
            history.get_states(self.hass, future), key=lambda state: state.entity_id
        ),
    ):
        assert state1 == state2

    # Test get_state here because we have a DB setup
    assert states[0] == history.get_state(self.hass, future, states[0].entity_id)

    time_before_recorder_ran = now - timedelta(days=1000)
    assert history.get_states(self.hass, time_before_recorder_ran) == []
    assert history.get_state(self.hass, time_before_recorder_ran, "demo.id") is None
def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes):
    """Test compiling hourly statistics for unsupported sensor."""
    attributes = dict(attributes)
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    four, states = record_states(hass, zero, "sensor.test1", attributes)
    if "unit_of_measurement" in attributes:
        attributes["unit_of_measurement"] = "invalid"
        _, _states = record_states(hass, zero, "sensor.test2", attributes)
        states = {**states, **_states}
        attributes.pop("unit_of_measurement")
        _, _states = record_states(hass, zero, "sensor.test3", attributes)
        states = {**states, **_states}
    attributes["state_class"] = "invalid"
    _, _states = record_states(hass, zero, "sensor.test4", attributes)
    states = {**states, **_states}
    attributes.pop("state_class")
    _, _states = record_states(hass, zero, "sensor.test5", attributes)
    states = {**states, **_states}
    attributes["state_class"] = "measurement"
    _, _states = record_states(hass, zero, "sensor.test6", attributes)
    states = {**states, **_states}
    attributes["state_class"] = "unsupported"
    _, _states = record_states(hass, zero, "sensor.test7", attributes)
    states = {**states, **_states}

    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {"statistic_id": "sensor.test1", "unit_of_measurement": "°C"}
    ]
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "mean": approx(16.440677966101696),
                "min": approx(10.0),
                "max": approx(30.0),
                "last_reset": None,
                "state": None,
                "sum": None,
            }
        ]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
def test_compile_periodic_statistics_exception(
    hass_recorder, mock_sensor_statistics, mock_from_stats
):
    """Test exception handling when compiling periodic statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})

    now = dt_util.utcnow()
    recorder.do_adhoc_statistics(start=now)
    recorder.do_adhoc_statistics(start=now + timedelta(minutes=5))
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(now),
        "end": process_timestamp_to_utc_isoformat(now + timedelta(minutes=5)),
        "mean": None,
        "min": None,
        "max": None,
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_2 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(now + timedelta(minutes=5)),
        "end": process_timestamp_to_utc_isoformat(now + timedelta(minutes=10)),
        "mean": None,
        "min": None,
        "max": None,
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {**expected_1, "statistic_id": "sensor.test1"},
        {**expected_2, "statistic_id": "sensor.test1"},
    ]
    expected_stats2 = [
        {**expected_2, "statistic_id": "sensor.test2"},
    ]
    expected_stats3 = [
        {**expected_1, "statistic_id": "sensor.test3"},
        {**expected_2, "statistic_id": "sensor.test3"},
    ]

    stats = statistics_during_period(hass, now, period="5minute")
    assert stats == {
        "sensor.test1": expected_stats1,
        "sensor.test2": expected_stats2,
        "sensor.test3": expected_stats3,
    }
def test_compile_hourly_statistics_fails(hass_recorder, caplog):
    """Test compiling hourly statistics when the statistics platform raises."""
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    with patch(
        "homeassistant.components.sensor.recorder.compile_statistics",
        side_effect=Exception,
    ):
        recorder.do_adhoc_statistics(period="hourly", start=zero)
        wait_recording_done(hass)
    assert "Error while processing event StatisticsTask" in caplog.text
def test_compile_hourly_statistics_unavailable(hass_recorder):
    """Test compiling hourly statistics, with the sensor being unavailable."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states_partially_unavailable(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=four)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, four)
    assert stats == {}
def test_execute_stmt(hass_recorder):
    """Test executing with execute_stmt."""
    hass = hass_recorder()
    instance = recorder.get_instance(hass)
    hass.states.set("sensor.on", "on")
    new_state = hass.states.get("sensor.on")
    wait_recording_done(hass)
    now = dt_util.utcnow()
    tomorrow = now + timedelta(days=1)
    one_week_from_now = now + timedelta(days=7)

    class MockExecutor:
        # Simulate a transient database error: the first .all() call raises,
        # the retried call returns a row. The counter is class-level because
        # each retry constructs a new MockExecutor instance.
        _calls = 0

        def __init__(self, stmt):
            """Init the mock."""

        def all(self):
            MockExecutor._calls += 1
            if MockExecutor._calls == 2:
                return ["mock_row"]
            raise SQLAlchemyError

    with session_scope(hass=hass) as session:
        # No time window, we always get a list
        stmt = history._get_single_entity_states_stmt(
            instance.schema_version, dt_util.utcnow(), "sensor.on", False
        )
        rows = util.execute_stmt(session, stmt)
        assert isinstance(rows, list)
        assert rows[0].state == new_state.state
        assert rows[0].entity_id == new_state.entity_id

        # Time window >= 2 days, we get a ChunkedIteratorResult
        rows = util.execute_stmt(session, stmt, now, one_week_from_now)
        assert isinstance(rows, ChunkedIteratorResult)
        row = next(rows)
        assert row.state == new_state.state
        assert row.entity_id == new_state.entity_id

        # Time window < 2 days, we get a list
        rows = util.execute_stmt(session, stmt, now, tomorrow)
        assert isinstance(rows, list)
        assert rows[0].state == new_state.state
        assert rows[0].entity_id == new_state.entity_id

        with patch.object(session, "execute", MockExecutor):
            rows = util.execute_stmt(session, stmt, now, tomorrow)
            assert rows == ["mock_row"]
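# A minimal sketch of the retry behavior the MockExecutor above exercises,
# assuming execute_stmt retries once when .all() raises SQLAlchemyError (the
# helper below is illustrative, not the recorder's actual implementation):
def _all_with_retry(session, stmt, attempts=2):
    """Execute stmt and return all rows, retrying on transient DB errors."""
    for attempt in range(attempts):
        try:
            return session.execute(stmt).all()
        except SQLAlchemyError:
            if attempt == attempts - 1:
                raise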
def test_statistics_duplicated(hass_recorder, caplog):
    """Test that statistics with the same start time are not compiled twice."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    with patch(
        "homeassistant.components.sensor.recorder.compile_statistics",
        return_value=statistics.PlatformCompiledStatistics([], {}),
    ) as compile_statistics:
        recorder.do_adhoc_statistics(start=zero)
        wait_recording_done(hass)
        assert compile_statistics.called
        compile_statistics.reset_mock()
        assert "Compiling statistics for" in caplog.text
        assert "Statistics already compiled" not in caplog.text
        caplog.clear()

        recorder.do_adhoc_statistics(start=zero)
        wait_recording_done(hass)
        assert not compile_statistics.called
        compile_statistics.reset_mock()
        assert "Compiling statistics for" not in caplog.text
        assert "Statistics already compiled" in caplog.text
        caplog.clear()
def test_demo_statistics(hass_recorder):
    """Test that the demo component makes some statistics available."""
    hass = hass_recorder()
    assert setup_component(hass, DOMAIN, {DOMAIN: {}})
    hass.block_till_done()
    hass.start()
    wait_recording_done(hass)

    statistic_ids = list_statistic_ids(hass)
    assert {
        "name": None,
        "source": "demo",
        "statistic_id": "demo:temperature_outdoor",
        "unit_of_measurement": "°C",
    } in statistic_ids
    assert {
        "name": None,
        "source": "demo",
        "statistic_id": "demo:energy_consumption",
        "unit_of_measurement": "kWh",
    } in statistic_ids
def test_compile_hourly_energy_statistics(hass_recorder):
    """Test compiling hourly energy statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    sns1_attr = {"device_class": "energy", "state_class": "measurement"}
    sns2_attr = {"device_class": "energy"}
    sns3_attr = {}

    zero, four, eight, states = record_energy_states(
        hass, sns1_attr, sns2_attr, sns3_attr
    )
    hist = history.get_significant_states(
        hass, zero - timedelta.resolution, eight + timedelta.resolution
    )
    assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1))
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": 20.0,
                "sum": 10.0,
            },
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": 40.0,
                "sum": 10.0,
            },
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": 70.0,
                "sum": 40.0,
            },
        ]
    }
def test_compile_hourly_statistics_partially_unavailable(hass_recorder):
    """Test compiling hourly statistics, with the sensor being partially unavailable."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states_partially_unavailable(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "mean": approx(21.1864406779661),
                "min": approx(10.0),
                "max": approx(25.0),
                "last_reset": None,
                "state": None,
                "sum": None,
            }
        ]
    }
def test_compile_hourly_statistics_unchanged(hass_recorder):
    """Test compiling hourly statistics, with no changes during the hour."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=four)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, four)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(four),
                "mean": approx(30.0),
                "min": approx(30.0),
                "max": approx(30.0),
                "last_reset": None,
                "state": None,
                "sum": None,
            }
        ]
    }
def init_recorder(self):
    """Initialize the recorder."""
    init_recorder_component(self.hass)
    self.hass.start()
    wait_recording_done(self.hass)
def set_state(entity_id, state, **kwargs):
    """Set the state."""
    self.hass.states.set(entity_id, state, **kwargs)
    wait_recording_done(self.hass)
    return self.hass.states.get(entity_id)
def set_state(state):
    """Set the state."""
    hass.states.set(entity_id, state)
    wait_recording_done(hass)
    return hass.states.get(entity_id)
def test_compile_hourly_statistics(hass_recorder):
    """Test compiling hourly statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    # Should not fail if there is nothing there yet
    stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
    assert stats == {}

    for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
        stats = statistics_during_period(hass, zero, period="5minute", **kwargs)
        assert stats == {}
    stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
    assert stats == {}

    recorder.do_adhoc_statistics(start=zero)
    recorder.do_adhoc_statistics(start=four)
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(zero),
        "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
        "mean": approx(14.915254237288135),
        "min": approx(10.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_2 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(four),
        "end": process_timestamp_to_utc_isoformat(four + timedelta(minutes=5)),
        "mean": approx(20.0),
        "min": approx(20.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {**expected_1, "statistic_id": "sensor.test1"},
        {**expected_2, "statistic_id": "sensor.test1"},
    ]
    expected_stats2 = [
        {**expected_1, "statistic_id": "sensor.test2"},
        {**expected_2, "statistic_id": "sensor.test2"},
    ]

    # Test statistics_during_period
    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}

    stats = statistics_during_period(
        hass, zero, statistic_ids=["sensor.test2"], period="5minute"
    )
    assert stats == {"sensor.test2": expected_stats2}

    stats = statistics_during_period(
        hass, zero, statistic_ids=["sensor.test3"], period="5minute"
    )
    assert stats == {}

    # Test get_last_short_term_statistics and get_latest_short_term_statistics
    stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
    assert stats == {}

    stats = get_last_short_term_statistics(hass, 1, "sensor.test1", True)
    assert stats == {"sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]}

    stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
    assert stats == {"sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]}

    metadata = get_metadata(hass, statistic_ids=["sensor.test1"])
    stats = get_latest_short_term_statistics(hass, ["sensor.test1"], metadata=metadata)
    assert stats == {"sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]}

    stats = get_last_short_term_statistics(hass, 2, "sensor.test1", True)
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_short_term_statistics(hass, 3, "sensor.test1", True)
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_short_term_statistics(hass, 1, "sensor.test3", True)
    assert stats == {}

    recorder.get_session().query(StatisticsShortTerm).delete()
    # Should not fail if there is nothing in the table
    stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
    assert stats == {}
def test_compile_hourly_statistics(hass_recorder):
    """Test compiling hourly statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
        stats = statistics_during_period(hass, zero, **kwargs)
        assert stats == {}
    stats = get_last_statistics(hass, 0, "sensor.test1")
    assert stats == {}

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    recorder.do_adhoc_statistics(period="hourly", start=four)
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(zero),
        "mean": approx(14.915254237288135),
        "min": approx(10.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_2 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(four),
        "mean": approx(20.0),
        "min": approx(20.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {**expected_1, "statistic_id": "sensor.test1"},
        {**expected_2, "statistic_id": "sensor.test1"},
    ]
    expected_stats2 = [
        {**expected_1, "statistic_id": "sensor.test2"},
        {**expected_2, "statistic_id": "sensor.test2"},
    ]

    # Test statistics_during_period
    stats = statistics_during_period(hass, zero)
    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}

    stats = statistics_during_period(hass, zero, statistic_ids=["sensor.test2"])
    assert stats == {"sensor.test2": expected_stats2}

    stats = statistics_during_period(hass, zero, statistic_ids=["sensor.test3"])
    assert stats == {}

    # Test get_last_statistics
    stats = get_last_statistics(hass, 0, "sensor.test1")
    assert stats == {}

    stats = get_last_statistics(hass, 1, "sensor.test1")
    assert stats == {"sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]}

    stats = get_last_statistics(hass, 2, "sensor.test1")
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_statistics(hass, 3, "sensor.test1")
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_statistics(hass, 1, "sensor.test3")
    assert stats == {}
def test_initialize_from_database_with_maxage(self):
    """Test initializing the statistics from the database."""
    now = dt_util.utcnow()
    mock_data = {
        "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC)
    }

    def mock_now():
        return mock_data["return_time"]

    # Testing correct retrieval from recorder, thus we do not
    # want purging to occur within the class itself.
    def mock_purge(self):
        return

    # Set maximum age to 3 hours.
    max_age = 3
    # Determine what our minimum age should be based on test values.
    expected_min_age = mock_data["return_time"] + timedelta(
        hours=len(self.values) - max_age
    )

    # enable the recorder
    init_recorder_component(self.hass)
    self.hass.block_till_done()
    self.hass.data[recorder.DATA_INSTANCE].block_till_done()

    with patch(
        "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now
    ), patch.object(StatisticsSensor, "_purge_old", mock_purge):
        # store some values
        for value in self.values:
            self.hass.states.set(
                "sensor.test_monitored",
                value,
                {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS},
            )
            self.hass.block_till_done()
            # insert the next value 1 hour later
            mock_data["return_time"] += timedelta(hours=1)

        # wait for the recorder to really store the data
        wait_recording_done(self.hass)
        # only now create the statistics component, so that it must read
        # the data from the database
        assert setup_component(
            self.hass,
            "sensor",
            {
                "sensor": {
                    "platform": "statistics",
                    "name": "test",
                    "entity_id": "sensor.test_monitored",
                    "sampling_size": 100,
                    "max_age": {"hours": max_age},
                }
            },
        )
        self.hass.block_till_done()

        self.hass.block_till_done()
        self.hass.start()
        self.hass.block_till_done()

        # check if the result is as in test_sensor_source()
        state = self.hass.states.get("sensor.test")

    assert expected_min_age == state.attributes.get("min_age")
    # The max_age timestamp should be 1 hour before what we have right
    # now in mock_data['return_time'].
    assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta(
        hours=1
    )
def test_external_statistics_errors(hass_recorder, caplog):
    """Test validation of external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)

    _external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }

    _external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    # Attempt to insert statistics for an entity
    external_metadata = {
        **_external_metadata,
        "statistic_id": "sensor.total_energy_import",
    }
    external_statistics = {**_external_statistics}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass, statistic_ids=("sensor.total_energy_import",)) == {}

    # Attempt to insert statistics for the wrong domain
    external_metadata = {**_external_metadata, "source": "other"}
    external_statistics = {**_external_statistics}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass, statistic_ids=("test:total_energy_import",)) == {}

    # Attempt to insert statistics with a naive starting time
    external_metadata = {**_external_metadata}
    external_statistics = {
        **_external_statistics,
        "start": period1.replace(tzinfo=None),
    }
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass, statistic_ids=("test:total_energy_import",)) == {}

    # Attempt to insert statistics with an invalid starting time
    external_metadata = {**_external_metadata}
    external_statistics = {**_external_statistics, "start": period1.replace(minute=1)}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass, statistic_ids=("test:total_energy_import",)) == {}
def test_delete_duplicates(caplog, tmpdir):
    """Test removal of duplicated statistics."""
    test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"

    module = "tests.components.recorder.models_schema_23"
    importlib.import_module(module)
    old_models = sys.modules[module]

    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))

    external_energy_statistics_1 = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 2,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 3,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 4,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 5,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 5,
        },
    )
    external_energy_metadata_1 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_1",
        "unit_of_measurement": "kWh",
    }
    external_energy_statistics_2 = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 20,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 30,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 40,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 50,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 50,
        },
    )
    external_energy_metadata_2 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_2",
        "unit_of_measurement": "kWh",
    }
    external_co2_statistics = (
        {
            "start": period1,
            "last_reset": None,
            "mean": 10,
        },
        {
            "start": period2,
            "last_reset": None,
            "mean": 30,
        },
        {
            "start": period3,
            "last_reset": None,
            "mean": 60,
        },
        {
            "start": period4,
            "last_reset": None,
            "mean": 90,
        },
    )
    external_co2_metadata = {
        "has_mean": True,
        "has_sum": False,
        "name": "Fossil percentage",
        "source": "test",
        "statistic_id": "test:fossil_percentage",
        "unit_of_measurement": "%",
    }

    # Create some duplicated statistics with schema version 23
    with patch.object(recorder, "models", old_models), patch.object(
        recorder.migration, "SCHEMA_VERSION", old_models.SCHEMA_VERSION
    ), patch(
        "homeassistant.components.recorder.create_engine", new=_create_engine_test
    ):
        hass = get_test_home_assistant()
        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
        wait_recording_done(hass)
        wait_recording_done(hass)

        with session_scope(hass=hass) as session:
            session.add(
                recorder.models.StatisticsMeta.from_meta(external_energy_metadata_1)
            )
            session.add(
                recorder.models.StatisticsMeta.from_meta(external_energy_metadata_2)
            )
            session.add(
                recorder.models.StatisticsMeta.from_meta(external_co2_metadata)
            )
        with session_scope(hass=hass) as session:
            for stat in external_energy_statistics_1:
                session.add(recorder.models.Statistics.from_stats(1, stat))
            for stat in external_energy_statistics_2:
                session.add(recorder.models.Statistics.from_stats(2, stat))
            for stat in external_co2_statistics:
                session.add(recorder.models.Statistics.from_stats(3, stat))

        hass.stop()
        dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

    # Test that the duplicates are removed during migration from schema 23
    hass = get_test_home_assistant()
    setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
    hass.start()
    wait_recording_done(hass)
    wait_recording_done(hass)
    hass.stop()
    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

    assert "Deleted 2 duplicated statistics rows" in caplog.text
    assert "Found non identical" not in caplog.text
    assert "Found duplicated" not in caplog.text
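# Sketch of the deduplication idea the migration above exercises, inferred
# from the log lines it asserts: rows sharing (metadata_id, start) are
# duplicates and only the most recently inserted one survives. Each tariff
# series above inserts period4 twice, hence "Deleted 2 duplicated statistics
# rows". Pure-Python illustration, not the recorder's actual query:
def _find_duplicate_ids(rows):
    """Return ids of older rows that share (metadata_id, start) with a newer row."""
    newest_for_key = {}
    duplicates = []
    for row in sorted(rows, key=lambda r: r.id):
        key = (row.metadata_id, row.start)
        if key in newest_for_key:
            duplicates.append(newest_for_key[key])
        newest_for_key[key] = row.id
    return duplicates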
def test_monthly_statistics(hass_recorder, caplog, timezone):
    """Test inserting external statistics."""
    dt_util.set_default_time_zone(dt_util.get_time_zone(timezone))

    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))

    external_statistics = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 2,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 3,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 4,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 5,
        },
    )
    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(hass, external_metadata, external_statistics)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="month")
    sep_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    sep_end = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    oct_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    oct_end = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00"))
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": sep_start.isoformat(),
                "end": sep_end.isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": oct_start.isoformat(),
                "end": oct_end.isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(3.0),
                "sum": approx(5.0),
            },
        ]
    }

    dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))
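# Several tests above call a record_states(hass) helper that is not part of
# this excerpt. A minimal sketch of its shape, assuming the pattern used
# elsewhere in these tests (freeze the recorder's clock, write a state, wait
# for the commit); the real helper also records additional entities. With the
# timestamps below, a time-weighted mean over the first 5-minute window is
# (10 * 75 + 15 * 150 + 20 * 70) / 295 = 14.915254..., consistent with the
# values asserted above, but treat the exact numbers here as illustrative.
def record_states_sketch(hass):
    """Record a few states for one sensor at known times (illustrative only)."""
    sns1 = "sensor.test1"
    attributes = {"state_class": "measurement", "unit_of_measurement": "°C"}

    def set_state(entity_id, state, **kwargs):
        """Set the state and wait until the recorder has stored it."""
        hass.states.set(entity_id, state, **kwargs)
        wait_recording_done(hass)
        return hass.states.get(entity_id)

    zero = dt_util.utcnow()
    one = zero + timedelta(seconds=5)
    two = one + timedelta(seconds=75)
    three = two + timedelta(seconds=150)
    four = three + timedelta(seconds=75)

    states = {sns1: []}
    for when, value in ((one, "10"), (two, "15"), (three, "20")):
        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=when
        ):
            states[sns1].append(set_state(sns1, value, attributes=attributes))
    return zero, four, states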