Example #1
0
def test_list_statistic_ids(hass_recorder, caplog, device_class, unit,
                            native_unit, statistic_type):
    """Test listing future statistic ids."""
    hass = hass_recorder()
    setup_component(hass, "sensor", {})
    hass.states.set(
        "sensor.test1",
        0,
        attributes={
            "device_class": device_class,
            "last_reset": 0,
            "state_class": "measurement",
            "unit_of_measurement": unit,
        },
    )
    expected = [{
        "statistic_id": "sensor.test1",
        "unit_of_measurement": native_unit
    }]
    assert list_statistic_ids(hass) == expected
    # Filtering by statistic type only matches when the sensor provides it.
    for stat_type in ("mean", "sum", "dogs"):
        filtered = list_statistic_ids(hass, statistic_type=stat_type)
        assert filtered == (expected if stat_type == statistic_type else [])
Example #2
0
def _ws_get_list_statistic_ids(
    hass: HomeAssistant,
    msg_id: int,
    statistic_type: Literal["mean", "sum"] | None = None,
) -> str:
    """Fetch a list of available statistic_id and convert them to json in the executor.

    Args:
        hass: The Home Assistant instance.
        msg_id: Websocket message id to echo back in the result message.
        statistic_type: Optional filter — restrict to "mean" or "sum"
            statistics; None returns all.

    Returns:
        The JSON-serialized websocket result message.
    """
    # Literal["mean", "sum"] is the canonical PEP 586 spelling of
    # Literal["mean"] | Literal["sum"] — same type, simpler to read.
    return JSON_DUMP(
        messages.result_message(msg_id, list_statistic_ids(hass, None, statistic_type))
    )
Example #3
0
def test_compile_hourly_statistics_unsupported(hass_recorder, caplog,
                                               attributes):
    """Test compiling hourly statistics for unsupported sensor."""
    # Copy so the mutations below do not leak into the parametrized fixture.
    attributes = dict(attributes)
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    # sensor.test1 keeps the original (supported) attributes; it is the only
    # sensor expected to produce statistics below.
    four, states = record_states(hass, zero, "sensor.test1", attributes)
    if "unit_of_measurement" in attributes:
        # sensor.test2: invalid unit of measurement.
        attributes["unit_of_measurement"] = "invalid"
        _, _states = record_states(hass, zero, "sensor.test2", attributes)
        states = {**states, **_states}
        # sensor.test3: no unit of measurement at all.
        attributes.pop("unit_of_measurement")
        _, _states = record_states(hass, zero, "sensor.test3", attributes)
        states = {**states, **_states}
    # sensor.test4: invalid state class.
    attributes["state_class"] = "invalid"
    _, _states = record_states(hass, zero, "sensor.test4", attributes)
    states = {**states, **_states}
    # sensor.test5: no state class.
    attributes.pop("state_class")
    _, _states = record_states(hass, zero, "sensor.test5", attributes)
    states = {**states, **_states}
    # sensor.test6: "measurement" state class but with the attributes mutated
    # above (unit possibly removed) — still yields no statistics per the
    # assertions below.
    attributes["state_class"] = "measurement"
    _, _states = record_states(hass, zero, "sensor.test6", attributes)
    states = {**states, **_states}
    # sensor.test7: unsupported state class.
    attributes["state_class"] = "unsupported"
    _, _states = record_states(hass, zero, "sensor.test7", attributes)
    states = {**states, **_states}

    # Everything recorded must come back from the history query.
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    # Only sensor.test1 produced statistics.
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [{
        "statistic_id": "sensor.test1",
        "unit_of_measurement": "°C"
    }]
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [{
            "statistic_id": "sensor.test1",
            "start": process_timestamp_to_utc_isoformat(zero),
            "mean": approx(16.440677966101696),
            "min": approx(10.0),
            "max": approx(30.0),
            "last_reset": None,
            "state": None,
            "sum": None,
        }]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
def test_demo_statistics(hass_recorder):
    """Test that the demo components makes some statistics available."""
    hass = hass_recorder()

    assert setup_component(hass, DOMAIN, {DOMAIN: {}})
    hass.block_till_done()
    hass.start()
    wait_recording_done(hass)

    statistic_ids = list_statistic_ids(hass)
    # Both demo-provided statistics must be present with their units.
    for statistic_id, unit in (
        ("demo:temperature_outdoor", "°C"),
        ("demo:energy_consumption", "kWh"),
    ):
        assert {
            "name": None,
            "source": "demo",
            "statistic_id": statistic_id,
            "unit_of_measurement": unit,
        } in statistic_ids
Example #5
0
def test_compile_hourly_statistics(hass_recorder, caplog, device_class, unit,
                                   native_unit, mean, min, max):
    """Test compiling hourly statistics."""
    # NOTE: ``min``/``max`` shadow builtins; the names are injected by
    # pytest parametrization and cannot be renamed here.
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    four, states = record_states(
        hass,
        zero,
        "sensor.test1",
        {
            "device_class": device_class,
            "state_class": "measurement",
            "unit_of_measurement": unit,
        },
    )
    assert dict(states) == dict(history.get_significant_states(hass, zero, four))

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    assert list_statistic_ids(hass) == [{
        "statistic_id": "sensor.test1",
        "unit_of_measurement": native_unit
    }]
    assert statistics_during_period(hass, zero) == {
        "sensor.test1": [{
            "statistic_id": "sensor.test1",
            "start": process_timestamp_to_utc_isoformat(zero),
            "mean": approx(mean),
            "min": approx(min),
            "max": approx(max),
            "last_reset": None,
            "state": None,
            "sum": None,
        }]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
def test_external_statistics_errors(hass_recorder, caplog):
    """Test validation of external statistics.

    Each invalid insert must raise HomeAssistantError and leave the
    statistics tables completely untouched.
    """
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=1)

    _external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }

    _external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    def _assert_insert_fails(metadata, statistics, lookup_id):
        """Assert the insert raises and stores no statistics or metadata."""
        with pytest.raises(HomeAssistantError):
            async_add_external_statistics(hass, metadata, (statistics, ))
        wait_recording_done(hass)
        assert statistics_during_period(hass, zero, period="hour") == {}
        assert list_statistic_ids(hass) == []
        assert get_metadata(hass, statistic_ids=(lookup_id, )) == {}

    # Attempt to insert statistics for an entity
    _assert_insert_fails(
        {**_external_metadata, "statistic_id": "sensor.total_energy_import"},
        {**_external_statistics},
        "sensor.total_energy_import",
    )

    # Attempt to insert statistics for the wrong domain
    _assert_insert_fails(
        {**_external_metadata, "source": "other"},
        {**_external_statistics},
        "test:total_energy_import",
    )

    # Attempt to insert statistics for a naive starting time
    _assert_insert_fails(
        {**_external_metadata},
        {**_external_statistics, "start": period1.replace(tzinfo=None)},
        "test:total_energy_import",
    )

    # Attempt to insert statistics for an invalid starting time
    _assert_insert_fails(
        {**_external_metadata},
        {**_external_statistics, "start": period1.replace(minute=1)},
        "test:total_energy_import",
    )
async def test_external_statistics(hass, hass_ws_client, caplog):
    """Test inserting external statistics."""
    client = await hass_ws_client()
    await async_init_recorder_component(hass)

    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    # Two consecutive one-hour periods aligned to the top of the hour.
    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=2)

    external_statistics1 = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2,
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    # Insert two hours of external statistics and verify they come back.
    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics1, external_statistics2))
    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [{
        "has_mean": False,
        "has_sum": True,
        "statistic_id": "test:total_energy_import",
        "name": "Total imported energy",
        "source": "test",
        "unit_of_measurement": "kWh",
    }]
    metadata = get_metadata(hass, statistic_ids=("test:total_energy_import", ))
    assert metadata == {
        "test:total_energy_import": (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": "test",
                "statistic_id": "test:total_energy_import",
                "unit_of_measurement": "kWh",
            },
        )
    }
    # get_last_statistics returns only the most recent period (period2).
    last_stats = get_last_statistics(hass, 1, "test:total_energy_import", True)
    assert last_stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics (period1 state/sum change;
    # period2 must be untouched)
    external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 5,
        "sum": 6,
    }
    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics, ))
    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(5.0),
                "sum": approx(6.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics again, now also supplying
    # max/mean/min for period1
    external_statistics = {
        "start": period1,
        "max": 1,
        "mean": 2,
        "min": 3,
        "last_reset": None,
        "state": 4,
        "sum": 5,
    }
    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics, ))
    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Adjust the sum of period2 by +1000 via the websocket API; earlier
    # periods must be unaffected.
    await client.send_json({
        "id": 1,
        "type": "recorder/adjust_sum_statistics",
        "statistic_id": "test:total_energy_import",
        "start_time": period2.isoformat(),
        "adjustment": 1000.0,
    })
    response = await client.receive_json()
    assert response["success"]

    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(1003.0),
            },
        ]
    }
Example #8
0
def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
    """Test compiling multiple hourly statistics."""
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    # test1/test2: energy sensors reporting in kWh with a last_reset attribute.
    sns1_attr = {**ENERGY_SENSOR_ATTRIBUTES, "last_reset": None}
    sns2_attr = {**ENERGY_SENSOR_ATTRIBUTES, "last_reset": None}
    # test3: reports in Wh — the expected statistics below are divided by 1000
    # and the listed unit is kWh.
    sns3_attr = {
        **ENERGY_SENSOR_ATTRIBUTES,
        "unit_of_measurement": "Wh",
        "last_reset": None,
    }
    # test4: no last_reset attribute — produces no statistics (it is absent
    # from the asserted statistic ids below), presumably for that reason.
    sns4_attr = {**ENERGY_SENSOR_ATTRIBUTES}
    seq1 = [10, 15, 20, 10, 30, 40, 50, 60, 70]
    seq2 = [110, 120, 130, 0, 30, 45, 55, 65, 75]
    seq3 = [0, 0, 5, 10, 30, 50, 60, 80, 90]
    seq4 = [0, 0, 5, 10, 30, 50, 60, 80, 90]

    four, eight, states = record_energy_states(hass, zero, "sensor.test1",
                                               sns1_attr, seq1)
    _, _, _states = record_energy_states(hass, zero, "sensor.test2", sns2_attr,
                                         seq2)
    states = {**states, **_states}
    _, _, _states = record_energy_states(hass, zero, "sensor.test3", sns3_attr,
                                         seq3)
    states = {**states, **_states}
    _, _, _states = record_energy_states(hass, zero, "sensor.test4", sns4_attr,
                                         seq4)
    states = {**states, **_states}
    hist = history.get_significant_states(hass, zero - timedelta.resolution,
                                          eight + timedelta.resolution)
    assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]

    # Compile statistics for three consecutive hours.
    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly",
                                 start=zero + timedelta(hours=1))
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly",
                                 start=zero + timedelta(hours=2))
    wait_recording_done(hass)
    # Only test1..test3 are listed; all normalized to kWh.
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "sensor.test1",
            "unit_of_measurement": "kWh"
        },
        {
            "statistic_id": "sensor.test2",
            "unit_of_measurement": "kWh"
        },
        {
            "statistic_id": "sensor.test3",
            "unit_of_measurement": "kWh"
        },
    ]
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": approx(20.0),
                "sum": approx(10.0),
            },
            {
                "statistic_id":
                "sensor.test1",
                "start":
                process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max":
                None,
                "mean":
                None,
                "min":
                None,
                "last_reset":
                process_timestamp_to_utc_isoformat(four),
                "state":
                approx(40.0),
                "sum":
                approx(10.0),
            },
            {
                "statistic_id":
                "sensor.test1",
                "start":
                process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max":
                None,
                "mean":
                None,
                "min":
                None,
                "last_reset":
                process_timestamp_to_utc_isoformat(four),
                "state":
                approx(70.0),
                "sum":
                approx(40.0),
            },
        ],
        # test2's sequence drops from 130 to 0, giving negative sums.
        "sensor.test2": [
            {
                "statistic_id": "sensor.test2",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": approx(130.0),
                "sum": approx(20.0),
            },
            {
                "statistic_id":
                "sensor.test2",
                "start":
                process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max":
                None,
                "mean":
                None,
                "min":
                None,
                "last_reset":
                process_timestamp_to_utc_isoformat(four),
                "state":
                approx(45.0),
                "sum":
                approx(-95.0),
            },
            {
                "statistic_id":
                "sensor.test2",
                "start":
                process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max":
                None,
                "mean":
                None,
                "min":
                None,
                "last_reset":
                process_timestamp_to_utc_isoformat(four),
                "state":
                approx(75.0),
                "sum":
                approx(-65.0),
            },
        ],
        # test3 reported Wh, so expected values are scaled by 1/1000 (to kWh).
        "sensor.test3": [
            {
                "statistic_id": "sensor.test3",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": approx(5.0 / 1000),
                "sum": approx(5.0 / 1000),
            },
            {
                "statistic_id":
                "sensor.test3",
                "start":
                process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max":
                None,
                "mean":
                None,
                "min":
                None,
                "last_reset":
                process_timestamp_to_utc_isoformat(four),
                "state":
                approx(50.0 / 1000),
                "sum":
                approx(30.0 / 1000),
            },
            {
                "statistic_id":
                "sensor.test3",
                "start":
                process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max":
                None,
                "mean":
                None,
                "min":
                None,
                "last_reset":
                process_timestamp_to_utc_isoformat(four),
                "state":
                approx(90.0 / 1000),
                "sum":
                approx(70.0 / 1000),
            },
        ],
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
Example #9
0
def test_compile_hourly_energy_statistics(hass_recorder, caplog, device_class,
                                          unit, native_unit, factor):
    """Test compiling hourly statistics."""
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    attributes = {
        "device_class": device_class,
        "state_class": "measurement",
        "unit_of_measurement": unit,
        "last_reset": None,
    }
    seq = [10, 15, 20, 10, 30, 40, 50, 60, 70]

    four, eight, states = record_energy_states(hass, zero, "sensor.test1",
                                               attributes, seq)
    hist = history.get_significant_states(hass, zero - timedelta.resolution,
                                          eight + timedelta.resolution)
    assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]

    # Compile statistics for three consecutive hours.
    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly",
                                 start=zero + timedelta(hours=1))
    wait_recording_done(hass)
    recorder.do_adhoc_statistics(period="hourly",
                                 start=zero + timedelta(hours=2))
    wait_recording_done(hass)
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [{
        "statistic_id": "sensor.test1",
        "unit_of_measurement": native_unit
    }]
    # `factor` presumably converts the recorded `unit` to `native_unit`
    # (parametrized) — every expected state/sum is scaled by it.
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(zero),
                "state": approx(factor * seq[2]),
                "sum": approx(factor * 10.0),
            },
            {
                "statistic_id":
                "sensor.test1",
                "start":
                process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
                "max":
                None,
                "mean":
                None,
                "min":
                None,
                "last_reset":
                process_timestamp_to_utc_isoformat(four),
                "state":
                approx(factor * seq[5]),
                "sum":
                approx(factor * 10.0),
            },
            {
                "statistic_id":
                "sensor.test1",
                "start":
                process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
                "max":
                None,
                "mean":
                None,
                "min":
                None,
                "last_reset":
                process_timestamp_to_utc_isoformat(four),
                "state":
                approx(factor * seq[8]),
                "sum":
                approx(factor * 40.0),
            },
        ]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
Example #10
0
def test_external_statistics(hass_recorder, caplog):
    """Test inserting external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    # Two consecutive one-hour periods aligned to the top of the hour.
    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=2)

    external_statistics1 = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2,
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    # Insert two hours of external statistics and verify they come back.
    async_add_external_statistics(
        hass, external_metadata, (external_statistics1, external_statistics2)
    )
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "test:total_energy_import",
            "name": "Total imported energy",
            "source": "test",
            "unit_of_measurement": "kWh",
        }
    ]
    metadata = get_metadata(hass, statistic_ids=("test:total_energy_import",))
    assert metadata == {
        "test:total_energy_import": (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": "test",
                "statistic_id": "test:total_energy_import",
                "unit_of_measurement": "kWh",
            },
        )
    }

    # Update the previously inserted statistics (period1 state/sum change;
    # period2 must be untouched)
    external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 5,
        "sum": 6,
    }
    async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(5.0),
                "sum": approx(6.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics again, now also supplying
    # max/mean/min for period1
    external_statistics = {
        "start": period1,
        "max": 1,
        "mean": 2,
        "min": 3,
        "last_reset": None,
        "state": 4,
        "sum": 5,
    }
    async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }