def test_significant_states_with_session_entity_minimal_response_no_matches(
    hass_recorder,
):
    """Test getting states at a specific point in time for entities that never have been recorded."""
    hass = hass_recorder()
    now = dt_util.utcnow()
    time_before_recorder_ran = now - timedelta(days=1000)
    with session_scope(hass=hass) as session:
        # Querying one unknown entity, then two, must both come back empty.
        for entity_ids in (["demo.id"], ["demo.id", "demo.id2"]):
            assert (
                history.get_significant_states_with_session(
                    hass,
                    session,
                    time_before_recorder_ran,
                    now,
                    entity_ids=entity_ids,
                    minimal_response=True,
                )
                == {}
            )
def _sorted_significant_states_json(
    self,
    hass,
    start_time,
    end_time,
    entity_ids,
    include_start_time_state,
    significant_changes_only,
    minimal_response,
    no_attributes,
):
    """Fetch significant stats from the database as json.

    Returns a web response whose body is a JSON list of per-entity state
    lists. When an include order is configured (``self.use_include_order``),
    the entities explicitly listed in ``self.filters.included_entities``
    are emitted first, in their configured order; remaining entities
    follow in query order.
    """
    timer_start = time.perf_counter()
    with session_scope(hass=hass) as session:
        states = history.get_significant_states_with_session(
            hass,
            session,
            start_time,
            end_time,
            entity_ids,
            self.filters,
            include_start_time_state,
            significant_changes_only,
            minimal_response,
            no_attributes,
        )

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug(
            "Extracted %d states in %fs", sum(map(len, states.values())), elapsed
        )

    # Optionally reorder the result to respect the ordering given
    # by any entities explicitly included in the configuration.
    if not self.filters or not self.use_include_order:
        return self.json(list(states.values()))

    # Pop matched entities straight out of the dict keyed by entity_id:
    # O(n) overall, instead of the previous nested scan with list.remove
    # (O(n^2)), and it cannot IndexError on an empty state list.
    sorted_result = [
        states.pop(order_entity)
        for order_entity in self.filters.included_entities
        if order_entity in states
    ]
    sorted_result.extend(states.values())
    return self.json(sorted_result)
def _sorted_significant_states_json(
    self,
    hass: HomeAssistant,
    start_time: dt,
    end_time: dt,
    entity_ids: list[str] | None,
    include_start_time_state: bool,
    significant_changes_only: bool,
    minimal_response: bool,
    no_attributes: bool,
) -> web.Response:
    """Fetch significant stats from the database as json."""
    timer_start = time.perf_counter()
    with session_scope(hass=hass) as session:
        states = history.get_significant_states_with_session(
            hass,
            session,
            start_time,
            end_time,
            entity_ids,
            self.filters,
            include_start_time_state,
            significant_changes_only,
            minimal_response,
            no_attributes,
        )

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug(
            "Extracted %d states in %fs", sum(map(len, states.values())), elapsed
        )

    # Optionally reorder the result to respect the ordering given
    # by any entities explicitly included in the configuration.
    if self.filters and self.use_include_order:
        # Entities named in the include list come first, in that order;
        # popping them out leaves the remainder to append afterwards.
        ordered = []
        for wanted in self.filters.included_entities:
            if wanted in states:
                ordered.append(states.pop(wanted))
        ordered.extend(states.values())
        return self.json(ordered)

    return self.json(list(states.values()))
def _compile_statistics( # noqa: C901 hass: HomeAssistant, session: Session, start: datetime.datetime, end: datetime.datetime, ) -> list[StatisticResult]: """Compile statistics for all entities during start-end.""" result: list[StatisticResult] = [] sensor_states = _get_sensor_states(hass) wanted_statistics = _wanted_statistics(sensor_states) old_metadatas = statistics.get_metadata_with_session( hass, session, statistic_ids=[i.entity_id for i in sensor_states]) # Get history between start and end entities_full_history = [ i.entity_id for i in sensor_states if "sum" in wanted_statistics[i.entity_id] ] history_list = {} if entities_full_history: history_list = history.get_significant_states_with_session( # type: ignore hass, session, start - datetime.timedelta.resolution, end, entity_ids=entities_full_history, significant_changes_only=False, ) entities_significant_history = [ i.entity_id for i in sensor_states if "sum" not in wanted_statistics[i.entity_id] ] if entities_significant_history: _history_list = history.get_significant_states_with_session( # type: ignore hass, session, start - datetime.timedelta.resolution, end, entity_ids=entities_significant_history, ) history_list = {**history_list, **_history_list} # If there are no recent state changes, the sensor's state may already be pruned # from the recorder. Get the state from the state machine instead. 
for _state in sensor_states: if _state.entity_id not in history_list: history_list[_state.entity_id] = (_state, ) for _state in sensor_states: # pylint: disable=too-many-nested-blocks entity_id = _state.entity_id if entity_id not in history_list: continue state_class = _state.attributes[ATTR_STATE_CLASS] device_class = _state.attributes.get(ATTR_DEVICE_CLASS) entity_history = history_list[entity_id] unit, fstates = _normalize_states(hass, session, old_metadatas, entity_history, device_class, entity_id) if not fstates: continue # Check metadata if old_metadata := old_metadatas.get(entity_id): if old_metadata[1]["unit_of_measurement"] != unit: if WARN_UNSTABLE_UNIT not in hass.data: hass.data[WARN_UNSTABLE_UNIT] = set() if entity_id not in hass.data[WARN_UNSTABLE_UNIT]: hass.data[WARN_UNSTABLE_UNIT].add(entity_id) _LOGGER.warning( "The %sunit of %s (%s) does not match the unit of already " "compiled statistics (%s). Generation of long term statistics " "will be suppressed unless the unit changes back to %s", "normalized " if device_class in DEVICE_CLASS_UNITS else "", entity_id, unit, old_metadata[1]["unit_of_measurement"], old_metadata[1]["unit_of_measurement"], ) continue # Set meta data meta: StatisticMetaData = { "has_mean": "mean" in wanted_statistics[entity_id], "has_sum": "sum" in wanted_statistics[entity_id], "name": None, "source": RECORDER_DOMAIN, "statistic_id": entity_id, "unit_of_measurement": unit, } # Make calculations stat: StatisticData = {"start": start} if "max" in wanted_statistics[entity_id]: stat["max"] = max(*itertools.islice( zip(*fstates), 1)) # type: ignore[typeddict-item] if "min" in wanted_statistics[entity_id]: stat["min"] = min(*itertools.islice( zip(*fstates), 1)) # type: ignore[typeddict-item] if "mean" in wanted_statistics[entity_id]: stat["mean"] = _time_weighted_average(fstates, start, end) if "sum" in wanted_statistics[entity_id]: last_reset = old_last_reset = None new_state = old_state = None _sum = 0.0 last_stats = 
statistics.get_last_short_term_statistics( hass, 1, entity_id, False) if entity_id in last_stats: # We have compiled history for this sensor before, use that as a starting point last_reset = old_last_reset = last_stats[entity_id][0][ "last_reset"] new_state = old_state = last_stats[entity_id][0]["state"] _sum = last_stats[entity_id][0]["sum"] or 0.0 for fstate, state in fstates: # Deprecated, will be removed in Home Assistant 2021.11 if ("last_reset" not in state.attributes and state_class == STATE_CLASS_MEASUREMENT): continue reset = False if (state_class != STATE_CLASS_TOTAL_INCREASING and (last_reset := _last_reset_as_utc_isoformat( state.attributes.get("last_reset"), entity_id)) != old_last_reset and last_reset is not None): if old_state is None: _LOGGER.info( "Compiling initial sum statistics for %s, zero point set to %s", entity_id, fstate, ) else: _LOGGER.info( "Detected new cycle for %s, last_reset set to %s (old last_reset %s)", entity_id, last_reset, old_last_reset, ) reset = True elif old_state is None and last_reset is None: reset = True _LOGGER.info( "Compiling initial sum statistics for %s, zero point set to %s", entity_id, fstate, ) elif state_class == STATE_CLASS_TOTAL_INCREASING: try: if old_state is None or reset_detected( hass, entity_id, fstate, new_state, state): reset = True _LOGGER.info( "Detected new cycle for %s, value dropped from %s to %s, " "triggered by state with last_updated set to %s", entity_id, new_state, state.last_updated.isoformat(), fstate, ) except HomeAssistantError: continue if reset: # The sensor has been reset, update the sum if old_state is not None: _sum += new_state - old_state # ..and update the starting point new_state = fstate old_last_reset = last_reset # Force a new cycle for an existing sensor to start at 0 if old_state is not None: old_state = 0.0 else: old_state = new_state else: new_state = fstate