def handle_subscribe_entities(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle subscribe entities command."""
    entity_ids = set(msg.get("entity_ids", []))

    @callback
    def forward_entity_changes(event: Event) -> None:
        """Forward entity state changed events to websocket."""
        if not connection.user.permissions.check_entity(
            event.data["entity_id"], POLICY_READ
        ):
            return
        if entity_ids and event.data["entity_id"] not in entity_ids:
            return
        connection.send_message(
            lambda: messages.cached_state_diff_message(msg["id"], event)
        )

    # We must never await between sending the states and listening for
    # state changed events or we will introduce a race condition
    # where some states are missed
    states = _async_get_allowed_states(hass, connection)
    connection.subscriptions[msg["id"]] = hass.bus.async_listen(
        EVENT_STATE_CHANGED, forward_entity_changes, run_immediately=True
    )
    connection.send_result(msg["id"])

    data: dict[str, dict[str, dict]] = {
        messages.ENTITY_EVENT_ADD: {
            state.entity_id: messages.compressed_state_dict_add(state)
            for state in states
            if not entity_ids or state.entity_id in entity_ids
        }
    }

    # JSON serialize here so we can recover if it blows up due to the
    # state machine containing unserializable data. This command is required
    # to succeed for the UI to show.
    response = messages.event_message(msg["id"], data)
    try:
        connection.send_message(JSON_DUMP(response))
        return
    except (ValueError, TypeError):
        connection.logger.error(
            "Unable to serialize to JSON. Bad data found at %s",
            format_unserializable_data(
                find_paths_unserializable_data(response, dump=JSON_DUMP)
            ),
        )
    del response

    add_entities = data[messages.ENTITY_EVENT_ADD]
    cannot_serialize: list[str] = []
    for entity_id, state_dict in add_entities.items():
        try:
            JSON_DUMP(state_dict)
        except (ValueError, TypeError):
            cannot_serialize.append(entity_id)
    for entity_id in cannot_serialize:
        del add_entities[entity_id]
    connection.send_message(JSON_DUMP(messages.event_message(msg["id"], data)))

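The recovery pattern above, reduced to its essentials: serialize the whole payload once on the happy path, and only when that raises probe each entry individually and drop the offenders. A minimal standalone sketch; `send`, `payload`, and `_dumps_ok` are hypothetical names, and JSON_DUMP stands in for any strict dumper that raises ValueError/TypeError on bad data:

from collections.abc import Callable
from typing import Any

def send_with_fallback(send: Callable[[str], None], payload: dict[str, Any]) -> None:
    """Send payload as JSON, dropping unserializable entries on failure."""
    try:
        # Happy path: one dump for the entire payload.
        send(JSON_DUMP(payload))
        return
    except (ValueError, TypeError):
        pass
    # Probe entry by entry so a single bad value cannot take down
    # the whole response.
    good = {key: value for key, value in payload.items() if _dumps_ok(value)}
    send(JSON_DUMP(good))

def _dumps_ok(value: Any) -> bool:
    """Return True if value survives a strict JSON dump."""
    try:
        JSON_DUMP(value)
    except (ValueError, TypeError):
        return False
    return True
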
def handle_get_states(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle get states command."""
    states = _async_get_allowed_states(hass, connection)

    # JSON serialize here so we can recover if it blows up due to the
    # state machine containing unserializable data. This command is required
    # to succeed for the UI to show.
    response = messages.result_message(msg["id"], states)
    try:
        connection.send_message(JSON_DUMP(response))
        return
    except (ValueError, TypeError):
        connection.logger.error(
            "Unable to serialize to JSON. Bad data found at %s",
            format_unserializable_data(
                find_paths_unserializable_data(response, dump=JSON_DUMP)
            ),
        )
    del response

    # If we can't serialize, we'll filter out unserializable states
    serialized = []
    for state in states:
        try:
            serialized.append(JSON_DUMP(state))
        except (ValueError, TypeError):
            # Error is already logged above
            pass

    # We now have partially serialized states. Craft some JSON.
    response2 = JSON_DUMP(messages.result_message(msg["id"], ["TO_REPLACE"]))
    response2 = response2.replace('"TO_REPLACE"', ", ".join(serialized))
    connection.send_message(response2)

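The sentinel splice at the end works because the envelope is dumped once with a single known string in the result list, which can then be textually replaced with the pre-serialized state fragments. An illustrative run; envelope fields and fragment values are made up:

# Dump the envelope once with a sentinel in place of the result list.
envelope = JSON_DUMP(
    {"id": 5, "type": "result", "success": True, "result": ["TO_REPLACE"]}
)
# Fragments that already survived JSON_DUMP individually.
fragments = ['{"entity_id": "light.kitchen"}', '{"entity_id": "light.office"}']
# Splice the raw JSON fragments into the envelope.
wire_message = envelope.replace('"TO_REPLACE"', ", ".join(fragments))
# wire_message is now valid JSON containing only the serializable states.
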
def message_to_json(message: dict[str, Any]) -> str:
    """Serialize a websocket message to json."""
    try:
        return JSON_DUMP(message)
    except (ValueError, TypeError):
        _LOGGER.error(
            "Unable to serialize to JSON. Bad data found at %s",
            format_unserializable_data(
                find_paths_unserializable_data(message, dump=JSON_DUMP)
            ),
        )
        return JSON_DUMP(
            error_message(
                message["id"], const.ERR_UNKNOWN_ERROR, "Invalid JSON in response"
            )
        )

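Hypothetical usage: a payload containing a value a strict JSON dumper rejects (a raw set here) falls back to a well-formed error envelope instead of raising, so the websocket connection keeps working:

# The set {1, 2, 3} is not JSON serializable, so this logs
# "Unable to serialize to JSON. Bad data found at ..." and returns the
# serialized error_message(12, ERR_UNKNOWN_ERROR, "Invalid JSON in response").
reply = message_to_json({"id": 12, "type": "result", "result": {1, 2, 3}})
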
async def _async_events_consumer(
    subscriptions_setup_complete_time: dt,
    connection: ActiveConnection,
    msg_id: int,
    stream_queue: asyncio.Queue[Event],
    event_processor: EventProcessor,
) -> None:
    """Stream events from the queue."""
    event_processor.switch_to_live()
    while True:
        events: list[Event] = [await stream_queue.get()]
        # If the event is older than the last db
        # event we already sent it so we skip it.
        if events[0].time_fired <= subscriptions_setup_complete_time:
            continue
        # We sleep for the EVENT_COALESCE_TIME so
        # we can group events together to minimize
        # the number of websocket messages when the
        # system is overloaded with an event storm
        await asyncio.sleep(EVENT_COALESCE_TIME)
        while not stream_queue.empty():
            events.append(stream_queue.get_nowait())
        if logbook_events := event_processor.humanify(
            async_event_to_row(e) for e in events
        ):
            connection.send_message(
                JSON_DUMP(
                    messages.event_message(
                        msg_id,
                        {"events": logbook_events},
                    )
                )
            )

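The coalescing step in isolation, as a runnable sketch: take one item, wait a short window so a burst can accumulate, then drain whatever else arrived without blocking. COALESCE_TIME and drain_batch are hypothetical stand-ins for EVENT_COALESCE_TIME and the inline loop above:

import asyncio
from typing import Any

COALESCE_TIME = 0.5  # seconds; illustrative value

async def drain_batch(queue: asyncio.Queue) -> list[Any]:
    """Collect a burst of queue items into one batch."""
    batch: list[Any] = [await queue.get()]  # block until the first item
    await asyncio.sleep(COALESCE_TIME)  # let the rest of the burst arrive
    while not queue.empty():
        batch.append(queue.get_nowait())  # drain without blocking
    return batch
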
def _ws_get_list_statistic_ids(
    hass: HomeAssistant,
    msg_id: int,
    statistic_type: Literal["mean"] | Literal["sum"] | None = None,
) -> str:
    """Fetch a list of available statistic_ids and convert them to JSON in the executor."""
    return JSON_DUMP(
        messages.result_message(
            msg_id, list_statistic_ids(hass, None, statistic_type)
        )
    )

async def json_serialize_states(hass):
    """Serialize a million states with the websocket default encoder."""
    states = [
        core.State("light.kitchen", "on", {"friendly_name": "Kitchen Lights"})
        for _ in range(10**6)
    ]
    start = timer()
    JSON_DUMP(states)
    return timer() - start

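A hypothetical way to invoke the benchmark above, assuming `timer` is timeit.default_timer and a test HomeAssistant instance is available as `hass` (the instance is unused by the body, but presumably kept for the benchmark harness signature):

elapsed = await json_serialize_states(hass)
print(f"Serialized 10**6 states in {elapsed:.2f}s")
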
def _ws_formatted_get_events(
    msg_id: int,
    start_time: dt,
    end_time: dt,
    event_processor: EventProcessor,
) -> str:
    """Fetch events and convert them to json in the executor."""
    return JSON_DUMP(
        messages.result_message(
            msg_id, event_processor.get_events(start_time, end_time)
        )
    )

def _ws_get_significant_states(
    hass: HomeAssistant,
    msg_id: int,
    start_time: dt,
    end_time: dt | None,
    entity_ids: list[str] | None,
    filters: Filters | None,
    use_include_order: bool | None,
    include_start_time_state: bool,
    significant_changes_only: bool,
    minimal_response: bool,
    no_attributes: bool,
) -> str:
    """Fetch history significant_states and convert them to json in the executor."""
    states = history.get_significant_states(
        hass,
        start_time,
        end_time,
        entity_ids,
        filters,
        include_start_time_state,
        significant_changes_only,
        minimal_response,
        no_attributes,
        True,
    )
    if not use_include_order or not filters:
        return JSON_DUMP(messages.result_message(msg_id, states))
    return JSON_DUMP(
        messages.result_message(
            msg_id,
            {
                order_entity: states.pop(order_entity)
                for order_entity in filters.included_entities
                if order_entity in states
            }
            | states,
        )
    )

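The dict comprehension merged with `| states` above relies on Python dicts preserving insertion order: popping the included entities into a new dict first, then merging in the remainder, puts those entities at the front of the result. A demonstration with illustrative values:

states = {"light.b": 2, "light.a": 1, "sensor.c": 3}
included = ["light.a", "light.b"]
reordered = {k: states.pop(k) for k in included if k in states} | states
# reordered == {"light.a": 1, "light.b": 2, "sensor.c": 3}
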
def _async_send_empty_response(
    connection: ActiveConnection, msg_id: int, start_time: dt, end_time: dt | None
) -> None:
    """Send an empty response.

    The current case for this is when the caller asks for entity_ids that
    will all be filtered away because they have UOMs or a state_class.
    """
    connection.send_result(msg_id)
    stream_end_time = end_time or dt_util.utcnow()
    empty_stream_message = _generate_stream_message([], start_time, stream_end_time)
    empty_response = messages.event_message(msg_id, empty_stream_message)
    connection.send_message(JSON_DUMP(empty_response))

def _ws_get_statistics_during_period(
    hass: HomeAssistant,
    msg_id: int,
    start_time: dt,
    end_time: dt | None = None,
    statistic_ids: list[str] | None = None,
    period: Literal["5minute", "day", "hour", "month"] = "hour",
) -> str:
    """Fetch statistics and convert them to json in the executor."""
    return JSON_DUMP(
        messages.result_message(
            msg_id,
            statistics_during_period(hass, start_time, end_time, statistic_ids, period),
        )
    )

def _ws_stream_get_events(
    msg_id: int,
    start_day: dt,
    end_day: dt,
    formatter: Callable[[int, Any], dict[str, Any]],
    event_processor: EventProcessor,
    partial: bool,
) -> tuple[str, dt | None]:
    """Fetch events and convert them to json in the executor."""
    events = event_processor.get_events(start_day, end_day)
    last_time = None
    if events:
        last_time = dt_util.utc_from_timestamp(events[-1]["when"])
    message = _generate_stream_message(events, start_day, end_day)
    if partial:
        # This is a hint to consumers of the api that we are about to
        # send another block of historical data, in case the UI needs
        # to show that historical data is still loading.
        message["partial"] = True
    return JSON_DUMP(formatter(msg_id, message)), last_time

def process(value: Any) -> str:
    """Dump json."""
    return JSON_DUMP(value)

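For context on what every snippet here assumes: JSON_DUMP is treated as a strict dumper that raises ValueError or TypeError on unserializable input rather than silently emitting bad data. A minimal sketch of such a dumper, not necessarily the project's actual definition:

from functools import partial
import json

# allow_nan=False makes json.dumps raise ValueError on NaN/Infinity,
# matching the failure mode the handlers above recover from.
JSON_DUMP = partial(json.dumps, allow_nan=False)
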
async def test_validation_grid(
    hass, mock_energy_manager, mock_is_entity_recorded, mock_get_metadata
):
    """Test validating grid with sensors for energy and cost/compensation."""
    mock_is_entity_recorded["sensor.grid_cost_1"] = False
    mock_is_entity_recorded["sensor.grid_compensation_1"] = False
    mock_get_metadata["sensor.grid_cost_1"] = None
    mock_get_metadata["sensor.grid_compensation_1"] = None
    await mock_energy_manager.async_update(
        {
            "energy_sources": [
                {
                    "type": "grid",
                    "flow_from": [
                        {
                            "stat_energy_from": "sensor.grid_consumption_1",
                            "stat_cost": "sensor.grid_cost_1",
                        }
                    ],
                    "flow_to": [
                        {
                            "stat_energy_to": "sensor.grid_production_1",
                            "stat_compensation": "sensor.grid_compensation_1",
                        }
                    ],
                }
            ]
        }
    )
    hass.states.async_set(
        "sensor.grid_consumption_1",
        "10.10",
        {
            "device_class": "energy",
            "unit_of_measurement": "beers",
            "state_class": "total_increasing",
        },
    )
    hass.states.async_set(
        "sensor.grid_production_1",
        "10.10",
        {
            "device_class": "energy",
            "unit_of_measurement": "beers",
            "state_class": "total_increasing",
        },
    )
    result = await validate.async_validate(hass)
    # Verify it's also JSON serializable
    JSON_DUMP(result)
    assert result.as_dict() == {
        "energy_sources": [
            [
                {
                    "type": "entity_unexpected_unit_energy",
                    "identifier": "sensor.grid_consumption_1",
                    "value": "beers",
                },
                {
                    "type": "statistics_not_defined",
                    "identifier": "sensor.grid_cost_1",
                    "value": None,
                },
                {
                    "type": "recorder_untracked",
                    "identifier": "sensor.grid_cost_1",
                    "value": None,
                },
                {
                    "type": "entity_not_defined",
                    "identifier": "sensor.grid_cost_1",
                    "value": None,
                },
                {
                    "type": "entity_unexpected_unit_energy",
                    "identifier": "sensor.grid_production_1",
                    "value": "beers",
                },
                {
                    "type": "statistics_not_defined",
                    "identifier": "sensor.grid_compensation_1",
                    "value": None,
                },
                {
                    "type": "recorder_untracked",
                    "identifier": "sensor.grid_compensation_1",
                    "value": None,
                },
                {
                    "type": "entity_not_defined",
                    "identifier": "sensor.grid_compensation_1",
                    "value": None,
                },
            ]
        ],
        "device_consumption": [],
    }