Example #1
def _normalize_states(
    hass: HomeAssistant,
    entity_history: list[State],
    device_class: str | None,
    entity_id: str,
) -> tuple[str | None, list[tuple[float, State]]]:
    """Normalize units."""
    unit = None

    if device_class not in UNIT_CONVERSIONS:
        # We're not normalizing this device class, return the states as they are
        fstates = []
        for state in entity_history:
            try:
                fstate = _parse_float(state.state)
            except (ValueError,
                    TypeError):  # TypeError guards against NULL state in DB
                continue
            fstates.append((fstate, state))

        if fstates:
            all_units = _get_units(fstates)
            if len(all_units) > 1:
                if WARN_UNSTABLE_UNIT not in hass.data:
                    hass.data[WARN_UNSTABLE_UNIT] = set()
                if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
                    hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
                    extra = ""
                    if old_metadata := statistics.get_metadata(
                            hass, entity_id):
                        extra = (
                            " and matches the unit of already compiled statistics "
                            f"({old_metadata['unit_of_measurement']})")
                    _LOGGER.warning(
                        "The unit of %s is changing, got multiple %s, generation of long term "
                        "statistics will be suppressed unless the unit is stable%s",
                        entity_id,
                        all_units,
                        extra,
                    )
                return None, []
            unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        return unit, fstates
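
_normalize_states leans on two helpers that the excerpt does not include. A minimal sketch of what they plausibly look like, assuming a plain float parse that rejects NaN/inf and a set comprehension over the state attributes; the names match the call sites above, but the bodies are assumptions, not the shipped code:

import math

from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT
from homeassistant.core import State


def _parse_float(state: str) -> float:
    """Parse a float string, raising ValueError on NaN or inf."""
    fstate = float(state)  # raises ValueError (or TypeError for None)
    if math.isnan(fstate) or math.isinf(fstate):
        raise ValueError
    return fstate


def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]:
    """Return the set of units seen across the given (float, State) pairs."""
    return {item[1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) for item in fstates}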
Example #2
def get_fake_stats(_hass, start, _end):
    return statistics.PlatformCompiledStatistics(
        [
            sensor_stats("sensor.test1", start),
            sensor_stats("sensor.test2", start),
            sensor_stats("sensor.test3", start),
        ],
        get_metadata(
            _hass, statistic_ids=["sensor.test1", "sensor.test2", "sensor.test3"]
        ),
    )
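
A stub like get_fake_stats only does something once it replaces the sensor platform's compile hook. A hedged usage sketch, assuming the patch target path (the sensor_stats helper is likewise not shown here):

from unittest.mock import patch

# The patch target below is an assumption about the module layout; adjust it
# to wherever the platform's compile_statistics actually lives. recorder,
# zero, hass and wait_recording_done are as in the hourly-statistics test below.
with patch(
    "homeassistant.components.sensor.recorder.compile_statistics",
    side_effect=get_fake_stats,
):
    recorder.do_adhoc_statistics(start=zero)
    wait_recording_done(hass)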
Example #3
def validate_statistics(
    hass: HomeAssistant, ) -> dict[str, list[statistics.ValidationIssue]]:
    """Validate statistics."""
    validation_result = defaultdict(list)

    entities = _get_entities(hass)

    for (
            entity_id,
            _state_class,
            device_class,
    ) in entities:
        state = hass.states.get(entity_id)
        assert state is not None

        state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)

        if device_class not in UNIT_CONVERSIONS:
            metadata = statistics.get_metadata(hass, entity_id)
            if not metadata:
                continue
            metadata_unit = metadata["unit_of_measurement"]
            if state_unit != metadata_unit:
                validation_result[entity_id].append(
                    statistics.ValidationIssue(
                        "units_changed",
                        {
                            "statistic_id": entity_id,
                            "state_unit": state_unit,
                            "metadata_unit": metadata_unit,
                        },
                    ))
            continue

        if state_unit not in UNIT_CONVERSIONS[device_class]:
            validation_result[entity_id].append(
                statistics.ValidationIssue(
                    "unsupported_unit",
                    {
                        "statistic_id": entity_id,
                        "device_class": device_class,
                        "state_unit": state_unit,
                    },
                ))

    return validation_result
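
The validator collects statistics.ValidationIssue records per entity. A minimal sketch of such an issue container, assuming a plain dataclass with a type string and an optional data payload, mirroring how the code above constructs it:

import dataclasses


@dataclasses.dataclass
class ValidationIssue:
    """Error or warning raised while validating statistics."""

    type: str
    data: dict[str, str | None] | None = None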
Example #4
def list_statistic_ids(hass: HomeAssistant,
                       statistic_type: str | None = None) -> dict:
    """Return statistic_ids and meta data."""
    entities = _get_entities(hass)

    statistic_ids = {}

    for entity_id, state_class, device_class in entities:
        if device_class in DEVICE_CLASS_STATISTICS[state_class]:
            provided_statistics = DEVICE_CLASS_STATISTICS[state_class][
                device_class]
        else:
            provided_statistics = DEFAULT_STATISTICS[state_class]

        if statistic_type is not None and statistic_type not in provided_statistics:
            continue

        state = hass.states.get(entity_id)
        assert state

        if ("sum" in provided_statistics
                and ATTR_LAST_RESET not in state.attributes
                and state.attributes.get(ATTR_STATE_CLASS)
                == STATE_CLASS_MEASUREMENT):
            continue

        metadata = statistics.get_metadata(hass, entity_id)
        if metadata:
            native_unit: str | None = metadata["unit_of_measurement"]
        else:
            native_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)

        if device_class not in UNIT_CONVERSIONS:
            statistic_ids[entity_id] = native_unit
            continue

        if native_unit not in UNIT_CONVERSIONS[device_class]:
            continue

        statistics_unit = DEVICE_CLASS_UNITS[device_class]
        statistic_ids[entity_id] = statistics_unit

    return statistic_ids
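
list_statistic_ids resolves the provided statistics from two lookup tables keyed by state class. An illustrative sketch of their shape; the concrete device classes and statistic sets here are assumptions:

# Shape only; the entries are assumptions consistent with the lookups above.
DEVICE_CLASS_STATISTICS: dict[str, dict[str, set[str]]] = {
    STATE_CLASS_MEASUREMENT: {
        DEVICE_CLASS_ENERGY: {"sum"},
    },
    STATE_CLASS_TOTAL_INCREASING: {},
}
DEFAULT_STATISTICS: dict[str, set[str]] = {
    STATE_CLASS_MEASUREMENT: {"mean", "min", "max"},
    STATE_CLASS_TOTAL_INCREASING: {"sum"},
}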
Example #5
def validate_statistics(
    hass: HomeAssistant, ) -> dict[str, list[statistics.ValidationIssue]]:
    """Validate statistics."""
    validation_result = defaultdict(list)

    sensor_states = _get_sensor_states(hass)

    for state in sensor_states:
        entity_id = state.entity_id
        device_class = state.attributes.get(ATTR_DEVICE_CLASS)
        state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)

        if device_class not in UNIT_CONVERSIONS:
            metadata = statistics.get_metadata(hass, (entity_id, ))
            if not metadata:
                continue
            metadata_unit = metadata[entity_id][1]["unit_of_measurement"]
            if state_unit != metadata_unit:
                validation_result[entity_id].append(
                    statistics.ValidationIssue(
                        "units_changed",
                        {
                            "statistic_id": entity_id,
                            "state_unit": state_unit,
                            "metadata_unit": metadata_unit,
                        },
                    ))
            continue

        if state_unit not in UNIT_CONVERSIONS[device_class]:
            validation_result[entity_id].append(
                statistics.ValidationIssue(
                    "unsupported_unit",
                    {
                        "statistic_id": entity_id,
                        "device_class": device_class,
                        "state_unit": state_unit,
                    },
                ))

    return validation_result
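
Unlike Example #3, this revision of get_metadata takes a tuple of statistic ids and returns a mapping of id to a (metadata_id, metadata) pair, hence the metadata[entity_id][1] indexing. A short sketch of that shape, with "sensor.test1" as a stand-in id; the same pairing shows up in the get_metadata assertions of the test examples further down:

metadata = statistics.get_metadata(hass, ("sensor.test1",))
# => {"sensor.test1": (1, {"statistic_id": "sensor.test1",
#                          "unit_of_measurement": "kWh", ...})}
metadata_unit = metadata["sensor.test1"][1]["unit_of_measurement"]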
Example #6
def validate_statistics(
    hass: HomeAssistant, ) -> dict[str, list[statistics.ValidationIssue]]:
    """Validate statistics."""
    validation_result = defaultdict(list)

    sensor_states = hass.states.all(DOMAIN)
    metadatas = statistics.get_metadata(hass, statistic_source=RECORDER_DOMAIN)
    sensor_entity_ids = {i.entity_id for i in sensor_states}
    sensor_statistic_ids = set(metadatas)

    for state in sensor_states:
        entity_id = state.entity_id
        device_class = state.attributes.get(ATTR_DEVICE_CLASS)
        state_class = state.attributes.get(ATTR_STATE_CLASS)
        state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)

        if metadata := metadatas.get(entity_id):
            if not is_entity_recorded(hass, state.entity_id):
                # Sensor was previously recorded, but no longer is
                validation_result[entity_id].append(
                    statistics.ValidationIssue(
                        "entity_no_longer_recorded",
                        {"statistic_id": entity_id},
                    ))

            if state_class not in STATE_CLASSES:
                # Sensor no longer has a valid state class
                validation_result[entity_id].append(
                    statistics.ValidationIssue(
                        "unsupported_state_class",
                        {
                            "statistic_id": entity_id,
                            "state_class": state_class
                        },
                    ))

            metadata_unit = metadata[1]["unit_of_measurement"]
            if device_class not in UNIT_CONVERSIONS:
                if state_unit != metadata_unit:
                    # The unit has changed
                    validation_result[entity_id].append(
                        statistics.ValidationIssue(
                            "units_changed",
                            {
                                "statistic_id": entity_id,
                                "state_unit": state_unit,
                                "metadata_unit": metadata_unit,
                            },
                        ))
            elif metadata_unit != DEVICE_CLASS_UNITS[device_class]:
                # The unit in metadata is not supported for this device class
                validation_result[entity_id].append(
                    statistics.ValidationIssue(
                        "unsupported_unit_metadata",
                        {
                            "statistic_id": entity_id,
                            "device_class": device_class,
                            "metadata_unit": metadata_unit,
                            "supported_unit": DEVICE_CLASS_UNITS[device_class],
                        },
                    ))
        elif state_class in STATE_CLASSES:
            if not is_entity_recorded(hass, state.entity_id):
                # Sensor is not recorded
                validation_result[entity_id].append(
                    statistics.ValidationIssue(
                        "entity_not_recorded",
                        {"statistic_id": entity_id},
                    ))

    # Flag statistic ids that no longer have a matching sensor state; this
    # closing check uses the sets built above, and the "no_state" issue type
    # is an assumption
    for statistic_id in sensor_statistic_ids - sensor_entity_ids:
        validation_result[statistic_id].append(
            statistics.ValidationIssue(
                "no_state",
                {"statistic_id": statistic_id},
            ))

    return validation_result
Example #7
def test_external_statistics_errors(hass_recorder, caplog):
    """Test validation of external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=1)

    _external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }

    _external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    # Attempt to insert statistics for an entity
    external_metadata = {
        **_external_metadata,
        "statistic_id": "sensor.total_energy_import",
    }
    external_statistics = {**_external_statistics}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata,
                                      (external_statistics, ))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass,
                        statistic_ids=("sensor.total_energy_import", )) == {}

    # Attempt to insert statistics for the wrong domain
    external_metadata = {**_external_metadata, "source": "other"}
    external_statistics = {**_external_statistics}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata,
                                      (external_statistics, ))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass,
                        statistic_ids=("test:total_energy_import", )) == {}

    # Attempt to insert statistics with a naive starting time
    external_metadata = {**_external_metadata}
    external_statistics = {
        **_external_statistics,
        "start": period1.replace(tzinfo=None),
    }
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata,
                                      (external_statistics, ))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass,
                        statistic_ids=("test:total_energy_import", )) == {}

    # Attempt to insert statistics for an invalid starting time
    external_metadata = {**_external_metadata}
    external_statistics = {
        **_external_statistics, "start": period1.replace(minute=1)
    }
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata,
                                      (external_statistics, ))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass,
                        statistic_ids=("test:total_energy_import", )) == {}
def test_compile_hourly_statistics(hass_recorder):
    """Test compiling hourly statistics."""
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert dict(states) == dict(hist)

    # Should not fail if there is nothing there yet
    stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
    assert stats == {}

    for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
        stats = statistics_during_period(hass,
                                         zero,
                                         period="5minute",
                                         **kwargs)
        assert stats == {}
    stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
    assert stats == {}

    recorder.do_adhoc_statistics(start=zero)
    recorder.do_adhoc_statistics(start=four)
    wait_recording_done(hass)
    expected_1 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(zero),
        "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
        "mean": approx(14.915254237288135),
        "min": approx(10.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_2 = {
        "statistic_id": "sensor.test1",
        "start": process_timestamp_to_utc_isoformat(four),
        "end": process_timestamp_to_utc_isoformat(four + timedelta(minutes=5)),
        "mean": approx(20.0),
        "min": approx(20.0),
        "max": approx(20.0),
        "last_reset": None,
        "state": None,
        "sum": None,
    }
    expected_stats1 = [
        {
            **expected_1, "statistic_id": "sensor.test1"
        },
        {
            **expected_2, "statistic_id": "sensor.test1"
        },
    ]
    expected_stats2 = [
        {
            **expected_1, "statistic_id": "sensor.test2"
        },
        {
            **expected_2, "statistic_id": "sensor.test2"
        },
    ]

    # Test statistics_during_period
    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {
        "sensor.test1": expected_stats1,
        "sensor.test2": expected_stats2
    }

    stats = statistics_during_period(hass,
                                     zero,
                                     statistic_ids=["sensor.test2"],
                                     period="5minute")
    assert stats == {"sensor.test2": expected_stats2}

    stats = statistics_during_period(hass,
                                     zero,
                                     statistic_ids=["sensor.test3"],
                                     period="5minute")
    assert stats == {}

    # Test get_last_short_term_statistics and get_latest_short_term_statistics
    stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
    assert stats == {}

    stats = get_last_short_term_statistics(hass, 1, "sensor.test1", True)
    assert stats == {
        "sensor.test1": [{
            **expected_2, "statistic_id": "sensor.test1"
        }]
    }

    stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
    assert stats == {
        "sensor.test1": [{
            **expected_2, "statistic_id": "sensor.test1"
        }]
    }

    metadata = get_metadata(hass, statistic_ids=["sensor.test1"])

    stats = get_latest_short_term_statistics(hass, ["sensor.test1"],
                                             metadata=metadata)
    assert stats == {
        "sensor.test1": [{
            **expected_2, "statistic_id": "sensor.test1"
        }]
    }

    stats = get_last_short_term_statistics(hass, 2, "sensor.test1", True)
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_short_term_statistics(hass, 3, "sensor.test1", True)
    assert stats == {"sensor.test1": expected_stats1[::-1]}

    stats = get_last_short_term_statistics(hass, 1, "sensor.test3", True)
    assert stats == {}

    recorder.get_session().query(StatisticsShortTerm).delete()
    # Should not fail if there is nothing in the table
    stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
    assert stats == {}
Example #9
async def test_external_statistics(hass, hass_ws_client, caplog):
    """Test inserting external statistics."""
    client = await hass_ws_client()
    await async_init_recorder_component(hass)

    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=2)

    external_statistics1 = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2,
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics1, external_statistics2))
    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [{
        "has_mean": False,
        "has_sum": True,
        "statistic_id": "test:total_energy_import",
        "name": "Total imported energy",
        "source": "test",
        "unit_of_measurement": "kWh",
    }]
    metadata = get_metadata(hass, statistic_ids=("test:total_energy_import", ))
    assert metadata == {
        "test:total_energy_import": (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": "test",
                "statistic_id": "test:total_energy_import",
                "unit_of_measurement": "kWh",
            },
        )
    }
    last_stats = get_last_statistics(hass, 1, "test:total_energy_import", True)
    assert last_stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 5,
        "sum": 6,
    }
    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics, ))
    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(5.0),
                "sum": approx(6.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "max": 1,
        "mean": 2,
        "min": 3,
        "last_reset": None,
        "state": 4,
        "sum": 5,
    }
    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics, ))
    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    await client.send_json({
        "id": 1,
        "type": "recorder/adjust_sum_statistics",
        "statistic_id": "test:total_energy_import",
        "start_time": period2.isoformat(),
        "adjustment": 1000.0,
    })
    response = await client.receive_json()
    assert response["success"]

    await async_wait_recording_done_without_instance(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(1003.0),
            },
        ]
    }
Example #10
def compile_statistics(  # noqa: C901
        hass: HomeAssistant, start: datetime.datetime,
        end: datetime.datetime) -> dict:
    """Compile statistics for all entities during start-end.

    Note: This will query the database and must not be run in the event loop
    """
    result: dict = {}

    entities = _get_entities(hass)

    wanted_statistics = _wanted_statistics(entities)

    # Get history between start and end
    entities_full_history = [
        i[0] for i in entities if "sum" in wanted_statistics[i[0]]
    ]
    history_list = {}
    if entities_full_history:
        history_list = history.get_significant_states(  # type: ignore
            hass,
            start - datetime.timedelta.resolution,
            end,
            entity_ids=entities_full_history,
            significant_changes_only=False,
        )
    entities_significant_history = [
        i[0] for i in entities if "sum" not in wanted_statistics[i[0]]
    ]
    if entities_significant_history:
        _history_list = history.get_significant_states(  # type: ignore
            hass,
            start - datetime.timedelta.resolution,
            end,
            entity_ids=entities_significant_history,
        )
        history_list = {**history_list, **_history_list}

    for (  # pylint: disable=too-many-nested-blocks
            entity_id,
            state_class,
            device_class,
    ) in entities:
        if entity_id not in history_list:
            continue

        entity_history = history_list[entity_id]
        unit, fstates = _normalize_states(hass, entity_history, device_class,
                                          entity_id)

        if not fstates:
            continue

        # Check metadata
        if old_metadata := statistics.get_metadata(hass, entity_id):
            if old_metadata["unit_of_measurement"] != unit:
                if WARN_UNSTABLE_UNIT not in hass.data:
                    hass.data[WARN_UNSTABLE_UNIT] = set()
                if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
                    hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
                    _LOGGER.warning(
                        "The unit of %s (%s) does not match the unit of already "
                        "compiled statistics (%s). Generation of long term statistics "
                        "will be suppressed unless the unit changes back to %s",
                        entity_id,
                        unit,
                        old_metadata["unit_of_measurement"],
                        old_metadata["unit_of_measurement"],
                    )
                continue

        result[entity_id] = {}

        # Set meta data
        result[entity_id]["meta"] = {
            "unit_of_measurement": unit,
            "has_mean": "mean" in wanted_statistics[entity_id],
            "has_sum": "sum" in wanted_statistics[entity_id],
        }

        # Make calculations
        stat: dict = {}
        if "max" in wanted_statistics[entity_id]:
            stat["max"] = max(*itertools.islice(zip(*fstates), 1))
        if "min" in wanted_statistics[entity_id]:
            stat["min"] = min(*itertools.islice(zip(*fstates), 1))

        if "mean" in wanted_statistics[entity_id]:
            stat["mean"] = _time_weighted_average(fstates, start, end)

        if "sum" in wanted_statistics[entity_id]:
            last_reset = old_last_reset = None
            new_state = old_state = None
            _sum = 0.0
            sum_increase = 0.0
            sum_increase_tmp = 0.0
            last_stats = statistics.get_last_statistics(
                hass, 1, entity_id, False)
            if entity_id in last_stats:
                # We have compiled history for this sensor before, use that as a starting point
                last_reset = old_last_reset = last_stats[entity_id][0][
                    "last_reset"]
                new_state = old_state = last_stats[entity_id][0]["state"]
                _sum = last_stats[entity_id][0]["sum"] or 0.0
                sum_increase = last_stats[entity_id][0]["sum_increase"] or 0.0

            for fstate, state in fstates:

                # Deprecated, will be removed in Home Assistant 2021.11
                if ("last_reset" not in state.attributes
                        and state_class == STATE_CLASS_MEASUREMENT):
                    continue

                reset = False
                if (state_class != STATE_CLASS_TOTAL_INCREASING
                        and (last_reset := state.attributes.get("last_reset"))
                        != old_last_reset):
                    if old_state is None:
                        _LOGGER.info(
                            "Compiling initial sum statistics for %s, zero point set to %s",
                            entity_id,
                            fstate,
                        )
                    else:
                        _LOGGER.info(
                            "Detected new cycle for %s, last_reset set to %s (old last_reset %s)",
                            entity_id,
                            last_reset,
                            old_last_reset,
                        )
                    reset = True
                elif old_state is None and last_reset is None:
                    reset = True
                    _LOGGER.info(
                        "Compiling initial sum statistics for %s, zero point set to %s",
                        entity_id,
                        fstate,
                    )
                elif state_class == STATE_CLASS_TOTAL_INCREASING and (
                        old_state is None
                        or reset_detected(hass, entity_id, fstate, new_state)):
                    reset = True
                    _LOGGER.info(
                        "Detected new cycle for %s, value dropped from %s to %s",
                        entity_id,
                        new_state,
                        fstate,
                    )

                if reset:
                    # The sensor has been reset, update the sum
                    if old_state is not None:
                        _sum += new_state - old_state
                        sum_increase += sum_increase_tmp
                        sum_increase_tmp = 0.0
                        if fstate > 0:
                            sum_increase_tmp += fstate
                    # ..and update the starting point
                    new_state = fstate
                    old_last_reset = last_reset
                    # Force a new cycle for an existing sensor to start at 0
                    if old_state is not None:
                        old_state = 0.0
                    else:
                        old_state = new_state
                else:
                    if new_state is not None and fstate > new_state:
                        sum_increase_tmp += fstate - new_state
                    new_state = fstate
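
The total_increasing branch above defers reset detection to a reset_detected helper that the excerpt leaves out. A hedged sketch consistent with its call site, assuming a dip-tolerant threshold:

def reset_detected(
    hass: HomeAssistant, entity_id: str, state: float, previous_state: float | None
) -> bool:
    """Return True if a total_increasing sensor appears to have reset."""
    if previous_state is None:
        return False
    # Assumption: a drop below 90% of the previous value counts as a reset;
    # smaller dips are treated as sensor jitter rather than a new cycle
    return state < 0.9 * previous_state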
Example #11
def test_external_statistics(hass_recorder, caplog):
    """Test inserting external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=2)

    external_statistics1 = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2,
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(
        hass, external_metadata, (external_statistics1, external_statistics2)
    )
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "test:total_energy_import",
            "name": "Total imported energy",
            "source": "test",
            "unit_of_measurement": "kWh",
        }
    ]
    metadata = get_metadata(hass, statistic_ids=("test:total_energy_import",))
    assert metadata == {
        "test:total_energy_import": (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": "test",
                "statistic_id": "test:total_energy_import",
                "unit_of_measurement": "kWh",
            },
        )
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 5,
        "sum": 6,
    }
    async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(5.0),
                "sum": approx(6.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "max": 1,
        "mean": 2,
        "min": 3,
        "last_reset": None,
        "state": 4,
        "sum": 5,
    }
    async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }