Code example #1
def test_rename_entity(hass_recorder):
    """Test statistics is migrated when entity_id is changed."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})

    entity_reg = mock_registry(hass)
    reg_entry = entity_reg.async_get_or_create(
        "sensor",
        "test",
        "unique_0000",
        suggested_object_id="test1",
    )
    assert reg_entry.entity_id == "sensor.test1"

    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
        stats = statistics_during_period(hass, zero, period="5minute", **kwargs)
        assert stats == {}
    stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
    assert stats == {}

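    # Compile statistics for the period starting at "zero"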
    recorder.do_adhoc_statistics(start=zero)
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(zero),
        "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
        "mean": approx(14.915254237288135),
        "min": approx(10.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {**expected_1, "statistic_id": "sensor.test1"},
    ]
    expected_stats2 = [
        {**expected_1, "statistic_id": "sensor.test2"},
    ]
    expected_stats99 = [
        {**expected_1, "statistic_id": "sensor.test99"},
    ]

    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}

    entity_reg.async_update_entity(reg_entry.entity_id, new_entity_id="sensor.test99")
    hass.block_till_done()

    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {"sensor.test99": expected_stats99, "sensor.test2": expected_stats2}
Code example #2
def test_compile_hourly_statistics_partially_unavailable(
        hass_recorder, caplog):
    """Test compiling hourly statistics, with the sensor being partially unavailable."""
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    four, states = record_states_partially_unavailable(
        hass, zero, "sensor.test1", TEMPERATURE_SENSOR_ATTRIBUTES)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [{
            "statistic_id": "sensor.test1",
            "start": process_timestamp_to_utc_isoformat(zero),
            "mean": approx(21.1864406779661),
            "min": approx(10.0),
            "max": approx(25.0),
            "last_reset": None,
            "state": None,
            "sum": None,
        }]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
Code example #3
def test_compile_hourly_statistics_unchanged(hass_recorder, caplog,
                                             device_class, unit, value):
    """Test compiling hourly statistics, with no changes during the hour."""
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    attributes = {
        "device_class": device_class,
        "state_class": "measurement",
        "unit_of_measurement": unit,
    }
    four, states = record_states(hass, zero, "sensor.test1", attributes)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=four)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, four)
    assert stats == {
        "sensor.test1": [{
            "statistic_id": "sensor.test1",
            "start": process_timestamp_to_utc_isoformat(four),
            "mean": approx(value),
            "min": approx(value),
            "max": approx(value),
            "last_reset": None,
            "state": None,
            "sum": None,
        }]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
Code example #4
File: test_statistics.py  Project: FrnchFrgg/core
def test_compile_hourly_statistics(hass_recorder):
    """Test compiling hourly statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

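    # Compile hourly statistics for the hour starting at "zero"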
    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "mean": 14.915254237288135,
                "min": 10.0,
                "max": 20.0,
                "last_reset": None,
                "state": None,
                "sum": None,
            }
        ]
    }
Code example #5
def test_compile_hourly_energy_statistics(hass_recorder):
    """Test compiling hourly statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    sns1_attr = {"device_class": "energy", "state_class": "measurement"}
    sns2_attr = {"device_class": "energy"}
    sns3_attr = {}

    zero, four, eight, states = record_energy_states(
        hass, sns1_attr, sns2_attr, sns3_attr
    )
    hist = history.get_significant_states(
        hass, zero - timedelta.resolution, eight + timedelta.resolution
    )
    assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]

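    # Compile hourly statistics for three consecutive hours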
    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1))
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": 20.0,
                "sum": 10.0,
            },
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": 40.0,
                "sum": 10.0,
            },
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": 70.0,
                "sum": 40.0,
            },
        ]
    }
Code example #6
def test_compile_hourly_statistics_unsupported(hass_recorder, caplog,
                                               attributes):
    """Test compiling hourly statistics for unsupported sensor."""
    attributes = dict(attributes)
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    four, states = record_states(hass, zero, "sensor.test1", attributes)
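    # Record additional states with invalid or missing attributes; per the
    # assertions below, only sensor.test1 ends up with statistics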
    if "unit_of_measurement" in attributes:
        attributes["unit_of_measurement"] = "invalid"
        _, _states = record_states(hass, zero, "sensor.test2", attributes)
        states = {**states, **_states}
        attributes.pop("unit_of_measurement")
        _, _states = record_states(hass, zero, "sensor.test3", attributes)
        states = {**states, **_states}
    attributes["state_class"] = "invalid"
    _, _states = record_states(hass, zero, "sensor.test4", attributes)
    states = {**states, **_states}
    attributes.pop("state_class")
    _, _states = record_states(hass, zero, "sensor.test5", attributes)
    states = {**states, **_states}
    attributes["state_class"] = "measurement"
    _, _states = record_states(hass, zero, "sensor.test6", attributes)
    states = {**states, **_states}
    attributes["state_class"] = "unsupported"
    _, _states = record_states(hass, zero, "sensor.test7", attributes)
    states = {**states, **_states}

    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [{
        "statistic_id": "sensor.test1",
        "unit_of_measurement": "°C"
    }]
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [{
            "statistic_id": "sensor.test1",
            "start": process_timestamp_to_utc_isoformat(zero),
            "mean": approx(16.440677966101696),
            "min": approx(10.0),
            "max": approx(30.0),
            "last_reset": None,
            "state": None,
            "sum": None,
        }]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
Code example #7
def test_compile_periodic_statistics_exception(
    hass_recorder, mock_sensor_statistics, mock_from_stats
):
    """Test exception handling when compiling periodic statistics."""

    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})

    now = dt_util.utcnow()
    recorder.do_adhoc_statistics(start=now)
    recorder.do_adhoc_statistics(start=now + timedelta(minutes=5))
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(now),
        "end": process_timestamp_to_utc_isoformat(now + timedelta(minutes=5)),
        "mean": None,
        "min": None,
        "max": None,
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_2 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(now + timedelta(minutes=5)),
        "end": process_timestamp_to_utc_isoformat(now + timedelta(minutes=10)),
        "mean": None,
        "min": None,
        "max": None,
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {**expected_1, "statistic_id": "sensor.test1"},
        {**expected_2, "statistic_id": "sensor.test1"},
    ]
    expected_stats2 = [
        {**expected_2, "statistic_id": "sensor.test2"},
    ]
    expected_stats3 = [
        {**expected_1, "statistic_id": "sensor.test3"},
        {**expected_2, "statistic_id": "sensor.test3"},
    ]

    stats = statistics_during_period(hass, now, period="5minute")
    assert stats == {
        "sensor.test1": expected_stats1,
        "sensor.test2": expected_stats2,
        "sensor.test3": expected_stats3,
    }
Code example #8
def test_compile_hourly_statistics_unavailable(hass_recorder):
    """Test compiling hourly statistics, with the sensor being unavailable."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states_partially_unavailable(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=four)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, four)
    assert stats == {}
Code example #9
File: __init__.py  Project: jbouwh/core
def _ws_get_statistics_during_period(
    hass: HomeAssistant,
    msg_id: int,
    start_time: dt,
    end_time: dt | None = None,
    statistic_ids: list[str] | None = None,
    period: Literal["5minute", "day", "hour", "month"] = "hour",
) -> str:
    """Fetch statistics and convert them to json in the executor."""
    return JSON_DUMP(
        messages.result_message(
            msg_id,
            statistics_during_period(hass, start_time, end_time, statistic_ids, period),
        )
    )
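
Because _ws_get_statistics_during_period does blocking database and JSON work, it is meant to run off the event loop. A minimal sketch of a caller, assuming a websocket command handler whose name and message keys are illustrative rather than the recorder's actual API:

# Hedged sketch: dispatch the blocking helper above to the executor.
# The handler name and message keys are assumptions for illustration.
async def ws_statistics_during_period(hass, connection, msg):
    start_time = dt_util.parse_datetime(msg["start_time"])
    end_time = dt_util.parse_datetime(msg["end_time"]) if msg.get("end_time") else None
    connection.send_message(
        await hass.async_add_executor_job(  # keep blocking work off the event loop
            _ws_get_statistics_during_period,
            hass,
            msg["id"],
            start_time,
            end_time,
            msg.get("statistic_ids"),
            msg.get("period", "hour"),
        )
    )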
Code example #10
def test_compile_hourly_statistics_partially_unavailable(hass_recorder):
    """Test compiling hourly statistics, with the sensor being partially unavailable."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states_partially_unavailable(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [{
            "statistic_id": "sensor.test1",
            "start": process_timestamp_to_utc_isoformat(zero),
            "mean": approx(21.1864406779661),
            "min": approx(10.0),
            "max": approx(25.0),
            "last_reset": None,
            "state": None,
            "sum": None,
        }]
    }
Code example #11
def test_monthly_statistics(hass_recorder, caplog, timezone):
    """Test inserting external statistics."""
    dt_util.set_default_time_zone(dt_util.get_time_zone(timezone))

    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
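    # Hour-aligned periods spanning September and October 2021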
    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))

    external_statistics = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 2,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 3,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 4,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 5,
        },
    )
    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(hass, external_metadata, external_statistics)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="month")
    sep_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    sep_end = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    oct_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    oct_end = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00"))
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": sep_start.isoformat(),
                "end": sep_end.isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": oct_start.isoformat(),
                "end": oct_end.isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(3.0),
                "sum": approx(5.0),
            },
        ]
    }

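    # Restore the default time zone so other tests are unaffected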
    dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))
Code example #12
def test_compile_hourly_statistics(hass_recorder):
    """Test compiling hourly statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
        stats = statistics_during_period(hass, zero, **kwargs)
        assert stats == {}
    stats = get_last_statistics(hass, 0, "sensor.test1")
    assert stats == {}

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    recorder.do_adhoc_statistics(period="hourly", start=four)
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(zero),
        "mean": approx(14.915254237288135),
        "min": approx(10.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_2 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(four),
        "mean": approx(20.0),
        "min": approx(20.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {**expected_1, "statistic_id": "sensor.test1"},
        {**expected_2, "statistic_id": "sensor.test1"},
    ]
    expected_stats2 = [
        {**expected_1, "statistic_id": "sensor.test2"},
        {**expected_2, "statistic_id": "sensor.test2"},
    ]

    # Test statistics_during_period
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": expected_stats1,
        "sensor.test2": expected_stats2
    }

    stats = statistics_during_period(hass,
                                     zero,
                                     statistic_ids=["sensor.test2"])
    assert stats == {"sensor.test2": expected_stats2}

    stats = statistics_during_period(hass,
                                     zero,
                                     statistic_ids=["sensor.test3"])
    assert stats == {}

    # Test get_last_statistics
    stats = get_last_statistics(hass, 0, "sensor.test1")
    assert stats == {}

    stats = get_last_statistics(hass, 1, "sensor.test1")
    assert stats == {
        "sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]
    }

    stats = get_last_statistics(hass, 2, "sensor.test1")
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_statistics(hass, 3, "sensor.test1")
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_statistics(hass, 1, "sensor.test3")
    assert stats == {}
Code example #13
def test_compile_hourly_statistics(hass_recorder):
    """Test compiling hourly statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    # Should not fail if there is nothing there yet
    stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
    assert stats == {}

    for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
        stats = statistics_during_period(hass,
                                         zero,
                                         period="5minute",
                                         **kwargs)
        assert stats == {}
    stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
    assert stats == {}

    recorder.do_adhoc_statistics(start=zero)
    recorder.do_adhoc_statistics(start=four)
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(zero),
        "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
        "mean": approx(14.915254237288135),
        "min": approx(10.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_2 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(four),
        "end": process_timestamp_to_utc_isoformat(four + timedelta(minutes=5)),
        "mean": approx(20.0),
        "min": approx(20.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {**expected_1, "statistic_id": "sensor.test1"},
        {**expected_2, "statistic_id": "sensor.test1"},
    ]
    expected_stats2 = [
        {**expected_1, "statistic_id": "sensor.test2"},
        {**expected_2, "statistic_id": "sensor.test2"},
    ]

    # Test statistics_during_period
    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {
        "sensor.test1": expected_stats1,
        "sensor.test2": expected_stats2
    }

    stats = statistics_during_period(hass,
                                     zero,
                                     statistic_ids=["sensor.test2"],
                                     period="5minute")
    assert stats == {"sensor.test2": expected_stats2}

    stats = statistics_during_period(hass,
                                     zero,
                                     statistic_ids=["sensor.test3"],
                                     period="5minute")
    assert stats == {}

    # Test get_last_short_term_statistics and get_latest_short_term_statistics
    stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
    assert stats == {}

    stats = get_last_short_term_statistics(hass, 1, "sensor.test1", True)
    assert stats == {
        "sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]
    }

    stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
    assert stats == {
        "sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]
    }

    metadata = get_metadata(hass, statistic_ids=["sensor.test1"])

    stats = get_latest_short_term_statistics(hass, ["sensor.test1"],
                                             metadata=metadata)
    assert stats == {
        "sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]
    }

    stats = get_last_short_term_statistics(hass, 2, "sensor.test1", True)
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_short_term_statistics(hass, 3, "sensor.test1", True)
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_short_term_statistics(hass, 1, "sensor.test3", True)
    assert stats == {}

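    # Wipe the short-term statistics table directly via the database session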
    recorder.get_session().query(StatisticsShortTerm).delete()
    # Should not fail if there is nothing in the table
    stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
    assert stats == {}
Code example #14
def test_external_statistics_errors(hass_recorder, caplog):
    """Test validation of external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=1)

    _external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }

    _external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    # Attempt to insert statistics for an entity
    external_metadata = {
        **_external_metadata,
        "statistic_id": "sensor.total_energy_import",
    }
    external_statistics = {**_external_statistics}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata,
                                      (external_statistics, ))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass,
                        statistic_ids=("sensor.total_energy_import", )) == {}

    # Attempt to insert statistics for the wrong domain
    external_metadata = {**_external_metadata, "source": "other"}
    external_statistics = {**_external_statistics}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata,
                                      (external_statistics, ))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass,
                        statistic_ids=("test:total_energy_import", )) == {}

    # Attempt to insert statistics for a naive starting time
    external_metadata = {**_external_metadata}
    external_statistics = {
        **_external_statistics,
        "start": period1.replace(tzinfo=None),
    }
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata,
                                      (external_statistics, ))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass,
                        statistic_ids=("test:total_energy_import", )) == {}

    # Attempt to insert statistics for an invalid starting time
    external_metadata = {**_external_metadata}
    external_statistics = {
        **_external_statistics, "start": period1.replace(minute=1)
    }
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata,
                                      (external_statistics, ))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass,
                        statistic_ids=("test:total_energy_import", )) == {}
Code example #15
async def test_external_statistics(hass, hass_ws_client, caplog):
    """Test inserting external statistics."""
    client = await hass_ws_client()
    await async_init_recorder_component(hass)

    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=2)

    external_statistics1 = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2,
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics1, external_statistics2))
    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [{
        "has_mean": False,
        "has_sum": True,
        "statistic_id": "test:total_energy_import",
        "name": "Total imported energy",
        "source": "test",
        "unit_of_measurement": "kWh",
    }]
    metadata = get_metadata(hass, statistic_ids=("test:total_energy_import", ))
    assert metadata == {
        "test:total_energy_import": (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": "test",
                "statistic_id": "test:total_energy_import",
                "unit_of_measurement": "kWh",
            },
        )
    }
    last_stats = get_last_statistics(hass, 1, "test:total_energy_import", True)
    assert last_stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 5,
        "sum": 6,
    }
    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics, ))
    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(5.0),
                "sum": approx(6.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "max": 1,
        "mean": 2,
        "min": 3,
        "last_reset": None,
        "state": 4,
        "sum": 5,
    }
    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics, ))
    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    await client.send_json({
        "id": 1,
        "type": "recorder/adjust_sum_statistics",
        "statistic_id": "test:total_energy_import",
        "start_time": period2.isoformat(),
        "adjustment": 1000.0,
    })
    response = await client.receive_json()
    assert response["success"]

    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(1003.0),
            },
        ]
    }
Code example #16
def test_compile_hourly_energy_statistics(hass_recorder, caplog, device_class,
                                          unit, native_unit, factor):
    """Test compiling hourly statistics."""
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    attributes = {
        "device_class": device_class,
        "state_class": "measurement",
        "unit_of_measurement": unit,
        "last_reset": None,
    }
    seq = [10, 15, 20, 10, 30, 40, 50, 60, 70]

    four, eight, states = record_energy_states(hass, zero, "sensor.test1",
                                               attributes, seq)
    hist = history.get_significant_states(hass, zero - timedelta.resolution,
                                          eight + timedelta.resolution)
    assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly",
                                 start=zero + timedelta(hours=1))
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly",
                                 start=zero + timedelta(hours=2))
    wait_recording_done(hass)
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [{
        "statistic_id": "sensor.test1",
        "unit_of_measurement": native_unit
    }]
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": approx(factor * seq[2]),
                "sum": approx(factor * 10.0),
            },
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": approx(factor * seq[5]),
                "sum": approx(factor * 10.0),
            },
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": approx(factor * seq[8]),
                "sum": approx(factor * 40.0),
            },
        ]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
Code example #17
def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
    """Test compiling multiple hourly statistics."""
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    sns1_attr = {**ENERGY_SENSOR_ATTRIBUTES, "last_reset": None}
    sns2_attr = {**ENERGY_SENSOR_ATTRIBUTES, "last_reset": None}
    sns3_attr = {
        **ENERGY_SENSOR_ATTRIBUTES,
        "unit_of_measurement": "Wh",
        "last_reset": None,
    }
    sns4_attr = {**ENERGY_SENSOR_ATTRIBUTES}
    seq1 = [10, 15, 20, 10, 30, 40, 50, 60, 70]
    seq2 = [110, 120, 130, 0, 30, 45, 55, 65, 75]
    seq3 = [0, 0, 5, 10, 30, 50, 60, 80, 90]
    seq4 = [0, 0, 5, 10, 30, 50, 60, 80, 90]

    four, eight, states = record_energy_states(hass, zero, "sensor.test1",
                                               sns1_attr, seq1)
    _, _, _states = record_energy_states(hass, zero, "sensor.test2", sns2_attr,
                                         seq2)
    states = {**states, **_states}
    _, _, _states = record_energy_states(hass, zero, "sensor.test3", sns3_attr,
                                         seq3)
    states = {**states, **_states}
    _, _, _states = record_energy_states(hass, zero, "sensor.test4", sns4_attr,
                                         seq4)
    states = {**states, **_states}
    hist = history.get_significant_states(hass, zero - timedelta.resolution,
                                          eight + timedelta.resolution)
    assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly",
                                 start=zero + timedelta(hours=1))
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly",
                                 start=zero + timedelta(hours=2))
    wait_recording_done(hass)
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "sensor.test1",
            "unit_of_measurement": "kWh"
        },
        {
            "statistic_id": "sensor.test2",
            "unit_of_measurement": "kWh"
        },
        {
            "statistic_id": "sensor.test3",
            "unit_of_measurement": "kWh"
        },
    ]
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": approx(20.0),
                "sum": approx(10.0),
            },
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": approx(40.0),
                "sum": approx(10.0),
            },
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": approx(70.0),
                "sum": approx(40.0),
            },
        ],
        "sensor.test2": [
            {
                "statistic_id": "sensor.test2",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": approx(130.0),
                "sum": approx(20.0),
            },
            {
                "statistic_id": "sensor.test2",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": approx(45.0),
                "sum": approx(-95.0),
            },
            {
                "statistic_id": "sensor.test2",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": approx(75.0),
                "sum": approx(-65.0),
            },
        ],
        "sensor.test3": [
            {
                "statistic_id": "sensor.test3",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": approx(5.0 / 1000),
                "sum": approx(5.0 / 1000),
            },
            {
                "statistic_id": "sensor.test3",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": approx(50.0 / 1000),
                "sum": approx(30.0 / 1000),
            },
            {
                "statistic_id": "sensor.test3",
                "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(four),
                "state": approx(90.0 / 1000),
                "sum": approx(70.0 / 1000),
            },
        ],
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
Code example #18
File: test_statistics.py  Project: jcgoette/core
def test_rename_entity_collision(hass_recorder, caplog):
    """Test statistics is migrated when entity_id is changed."""
    hass = hass_recorder()
    setup_component(hass, "sensor", {})

    entity_reg = mock_registry(hass)

    @callback
    def add_entry():
        reg_entry = entity_reg.async_get_or_create(
            "sensor",
            "test",
            "unique_0000",
            suggested_object_id="test1",
        )
        assert reg_entry.entity_id == "sensor.test1"

    hass.add_job(add_entry)
    hass.block_till_done()

    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
        stats = statistics_during_period(hass, zero, period="5minute", **kwargs)
        assert stats == {}
    stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
    assert stats == {}

    do_adhoc_statistics(hass, start=zero)
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(zero),
        "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
        "mean": approx(14.915254237288135),
        "min": approx(10.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {**expected_1, "statistic_id": "sensor.test1"},
    ]
    expected_stats2 = [
        {**expected_1, "statistic_id": "sensor.test2"},
    ]

    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}

    # Insert metadata for sensor.test99
    metadata_1 = {
        "has_mean": True,
        "has_sum": False,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "sensor.test99",
        "unit_of_measurement": "kWh",
    }

    with session_scope(hass=hass) as session:
        session.add(recorder.models.StatisticsMeta.from_meta(metadata_1))

    # Rename entity sensor.test1 to sensor.test99
    @callback
    def rename_entry():
        entity_reg.async_update_entity("sensor.test1", new_entity_id="sensor.test99")

    hass.add_job(rename_entry)
    wait_recording_done(hass)

    # Statistics failed to migrate due to the collision
    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
    assert "Blocked attempt to insert duplicated statistic rows" in caplog.text
Code example #19
def test_external_statistics(hass_recorder, caplog):
    """Test inserting external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=2)

    external_statistics1 = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2,
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(
        hass, external_metadata, (external_statistics1, external_statistics2)
    )
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "test:total_energy_import",
            "name": "Total imported energy",
            "source": "test",
            "unit_of_measurement": "kWh",
        }
    ]
    metadata = get_metadata(hass, statistic_ids=("test:total_energy_import",))
    assert metadata == {
        "test:total_energy_import": (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": "test",
                "statistic_id": "test:total_energy_import",
                "unit_of_measurement": "kWh",
            },
        )
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 5,
        "sum": 6,
    }
    async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(5.0),
                "sum": approx(6.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "max": 1,
        "mean": 2,
        "min": 3,
        "last_reset": None,
        "state": 4,
        "sum": 5,
    }
    async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }