def test_handling_broken_json_state_attributes(caplog):
    """Test we handle broken json in state attributes."""
    state_attributes = StateAttributes(
        attributes_id=444, hash=1234, shared_attrs="{NOT_PARSE}"
    )
    assert state_attributes.to_native() == {}
    assert "Error converting row to state attributes" in caplog.text
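# Hedged companion sketch (not part of the original suite): the happy path for
# to_native(), assuming shared_attrs holds valid JSON that decodes to the
# attributes dict.
def test_handling_valid_json_state_attributes():
    """Valid JSON in shared_attrs should round-trip to a dict."""
    state_attributes = StateAttributes(
        attributes_id=445, hash=1234, shared_attrs='{"name": "the light"}'
    )
    assert state_attributes.to_native() == {"name": "the light"}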
async def _add_db_entries(hass: HomeAssistant, cutoff: datetime, rows: int) -> None:
    timestamp_keep = cutoff
    timestamp_purge = cutoff - timedelta(microseconds=1)
    with recorder.session_scope(hass=hass) as session:
        session.add(
            Events(
                event_id=1000,
                event_type="KEEP",
                event_data="{}",
                origin="LOCAL",
                time_fired=timestamp_keep,
            )
        )
        session.add(
            States(
                entity_id="test.cutoff",
                state="keep",
                attributes="{}",
                last_changed=timestamp_keep,
                last_updated=timestamp_keep,
                event_id=1000,
                attributes_id=1000,
            )
        )
        session.add(
            StateAttributes(
                shared_attrs="{}",
                hash=1234,
                attributes_id=1000,
            )
        )
        for row in range(1, rows):
            session.add(
                Events(
                    event_id=1000 + row,
                    event_type="PURGE",
                    event_data="{}",
                    origin="LOCAL",
                    time_fired=timestamp_purge,
                )
            )
            session.add(
                States(
                    entity_id="test.cutoff",
                    state="purge",
                    attributes="{}",
                    last_changed=timestamp_purge,
                    last_updated=timestamp_purge,
                    event_id=1000 + row,
                    attributes_id=1000 + row,
                )
            )
            session.add(
                StateAttributes(
                    shared_attrs="{}",
                    hash=1234,
                    attributes_id=1000 + row,
                )
            )
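# Minimal usage sketch for the helper above, assuming the recorder is already
# set up for `hass` (the exact recorder fixture is not shown here). The test
# name and assertions are illustrative, not the original purge-cutoff test.
async def test_cutoff_rows_are_inserted(hass: HomeAssistant) -> None:
    """Sanity-check the rows written by _add_db_entries."""
    cutoff = dt_util.utcnow() - timedelta(days=5)
    await _add_db_entries(hass, cutoff, rows=10)
    with recorder.session_scope(hass=hass) as session:
        # One KEEP event at the cutoff plus rows - 1 PURGE events just before it.
        assert session.query(Events).filter(Events.event_type == "KEEP").count() == 1
        assert session.query(Events).filter(Events.event_type == "PURGE").count() == 9
        assert session.query(States).count() == 10
        assert session.query(StateAttributes).count() == 10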
def _add_db_entries(
    hass: ha.HomeAssistant, point: datetime, entity_ids: list[str]
) -> None:
    with recorder.session_scope(hass=hass) as session:
        for idx, entity_id in enumerate(entity_ids):
            session.add(
                Events(
                    event_id=1001 + idx,
                    event_type="state_changed",
                    event_data="{}",
                    origin="LOCAL",
                    time_fired=point,
                )
            )
            session.add(
                States(
                    entity_id=entity_id,
                    state="on",
                    attributes='{"name":"the light"}',
                    last_changed=point,
                    last_updated=point,
                    event_id=1001 + idx,
                    attributes_id=1002 + idx,
                )
            )
            session.add(
                StateAttributes(
                    shared_attrs='{"name":"the shared light"}',
                    hash=1234 + idx,
                    attributes_id=1002 + idx,
                )
            )
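# Hedged sketch of how the per-entity helper above could be verified; the
# helper name and assertions are assumptions, not the real test that calls it.
def _assert_light_rows(hass: ha.HomeAssistant, entity_ids: list[str]) -> None:
    """Verify one States/StateAttributes pair was written per entity id."""
    with recorder.session_scope(hass=hass) as session:
        for entity_id in entity_ids:
            states = (
                session.query(States).filter(States.entity_id == entity_id).all()
            )
            assert len(states) == 1
            assert states[0].state == "on"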
async def _add_db_entries(hass: HomeAssistant, timestamp: datetime) -> None:
    with recorder.session_scope(hass=hass) as session:
        session.add(
            Events(
                event_id=1001,
                event_type="EVENT_TEST_PURGE",
                event_data="{}",
                origin="LOCAL",
                time_fired=timestamp,
            )
        )
        session.add(
            States(
                entity_id="test.recorder2",
                state="purgeme",
                attributes="{}",
                last_changed=timestamp,
                last_updated=timestamp,
                event_id=1001,
                attributes_id=1002,
            )
        )
        session.add(
            StateAttributes(
                shared_attrs="{}",
                hash=1234,
                attributes_id=1002,
            )
        )
def _add_state_and_state_changed_event(
    session: Session,
    entity_id: str,
    state: str,
    timestamp: datetime,
    event_id: int,
) -> None:
    """Add state and state_changed event to database for testing."""
    state_attrs = StateAttributes(
        hash=event_id, shared_attrs=json.dumps({entity_id: entity_id})
    )
    session.add(state_attrs)
    session.add(
        States(
            entity_id=entity_id,
            state=state,
            attributes=None,
            last_changed=timestamp,
            last_updated=timestamp,
            event_id=event_id,
            state_attributes=state_attrs,
        )
    )
    session.add(
        Events(
            event_id=event_id,
            event_type=EVENT_STATE_CHANGED,
            event_data="{}",
            origin="LOCAL",
            time_fired=timestamp,
        )
    )
def test_from_event_to_db_state_attributes():
    """Test converting event to db state attributes."""
    attrs = {"this_attr": True}
    state = ha.State("sensor.temperature", "18", attrs)
    event = ha.Event(
        EVENT_STATE_CHANGED,
        {"entity_id": "sensor.temperature", "old_state": None, "new_state": state},
        context=state.context,
    )
    assert StateAttributes.from_event(event).to_native() == attrs
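# Hedged companion check, assuming shared_attrs stores the attributes as a JSON
# string (which is what to_native() decodes in the test above). Illustrative,
# not part of the original suite.
def test_from_event_shared_attrs_is_json():
    """The db row should carry the attributes as serialized JSON."""
    attrs = {"this_attr": True}
    state = ha.State("sensor.temperature", "18", attrs)
    event = ha.Event(
        EVENT_STATE_CHANGED,
        {"entity_id": "sensor.temperature", "old_state": None, "new_state": state},
        context=state.context,
    )
    assert json.loads(StateAttributes.from_event(event).shared_attrs) == attrs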
def _add_db_entries(hass: HomeAssistant) -> None:
    with recorder.session_scope(hass=hass) as session:
        # Add states and state_changed events that should be purged
        for days in range(1, 4):
            timestamp = dt_util.utcnow() - timedelta(days=days)
            for event_id in range(1000, 1020):
                _add_state_and_state_changed_event(
                    session,
                    "sensor.excluded",
                    "purgeme",
                    timestamp,
                    event_id * days,
                )
        # Add state **without** state_changed event that should be purged
        timestamp = dt_util.utcnow() - timedelta(days=1)
        session.add(
            States(
                entity_id="sensor.excluded",
                state="purgeme",
                attributes="{}",
                last_changed=timestamp,
                last_updated=timestamp,
            )
        )
        # Add states and state_changed events that should be kept
        timestamp = dt_util.utcnow() - timedelta(days=2)
        for event_id in range(200, 210):
            _add_state_and_state_changed_event(
                session,
                "sensor.keep",
                "keep",
                timestamp,
                event_id,
            )
        # Add states with linked old_state_ids that need to be handled
        timestamp = dt_util.utcnow() - timedelta(days=0)
        state_attrs = StateAttributes(
            hash=0,
            shared_attrs=json.dumps(
                {"sensor.linked_old_state_id": "sensor.linked_old_state_id"}
            ),
        )
        state_1 = States(
            entity_id="sensor.linked_old_state_id",
            state="keep",
            attributes="{}",
            last_changed=timestamp,
            last_updated=timestamp,
            old_state_id=1,
            state_attributes=state_attrs,
        )
        timestamp = dt_util.utcnow() - timedelta(days=4)
        state_2 = States(
            entity_id="sensor.linked_old_state_id",
            state="keep",
            attributes="{}",
            last_changed=timestamp,
            last_updated=timestamp,
            old_state_id=2,
            state_attributes=state_attrs,
        )
        state_3 = States(
            entity_id="sensor.linked_old_state_id",
            state="keep",
            attributes="{}",
            last_changed=timestamp,
            last_updated=timestamp,
            old_state_id=62,  # keep
            state_attributes=state_attrs,
        )
        session.add_all((state_attrs, state_1, state_2, state_3))
        # Add event that should be kept
        session.add(
            Events(
                event_id=100,
                event_type="EVENT_KEEP",
                event_data="{}",
                origin="LOCAL",
                time_fired=timestamp,
            )
        )
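# Hedged sketch (not the original assertions): a quick check of the fixture rows
# written by _add_db_entries above, before any purge logic is exercised. The
# counts follow directly from the loops in that helper.
def _assert_db_entries_added(hass: HomeAssistant) -> None:
    with recorder.session_scope(hass=hass) as session:
        # Three linked old_state_id rows were added via session.add_all().
        assert (
            session.query(States)
            .filter(States.entity_id == "sensor.linked_old_state_id")
            .count()
            == 3
        )
        # range(200, 210) adds ten sensor.keep states.
        assert (
            session.query(States).filter(States.entity_id == "sensor.keep").count()
            == 10
        )
        # Exactly one EVENT_KEEP event was added at the end.
        assert (
            session.query(Events).filter(Events.event_type == "EVENT_KEEP").count()
            == 1
        )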