def _add_data_in_last_run(entities):
    """Add test data in the last recorder_run."""
    # pylint: disable=protected-access
    now = dt_util.utcnow() - timedelta(minutes=10)
    twenty_min_ago = now - timedelta(minutes=20)
    thirty_min_ago = now - timedelta(minutes=30)
    recorder_runs = recorder.get_model('RecorderRuns')
    states = recorder.get_model('States')

    with recorder.session_scope() as session:
        # Record a closed run covering the test window.
        run = recorder_runs(
            start=thirty_min_ago,
            end=now,
            created=thirty_min_ago,
        )
        recorder._INSTANCE._commit(session, run)

        # One state row per requested entity, all inside the run.
        for entity_id, state in entities.items():
            dbstate = states(
                entity_id=entity_id,
                domain=split_entity_id(entity_id)[0],
                state=state,
                attributes='{}',
                last_changed=twenty_min_ago,
                last_updated=twenty_min_ago,
                created=twenty_min_ago,
            )
            recorder._INSTANCE._commit(session, dbstate)
def get_significant_states(start_time, end_time=None, entity_id=None,
                           filters=None):
    """Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    entity_ids = None if entity_id is None else (entity_id.lower(),)
    states = recorder.get_model('States')

    significant = (
        states.domain.in_(SIGNIFICANT_DOMAINS) |
        (states.last_changed == states.last_updated))
    query = recorder.query('States').filter(
        significant & (states.last_updated > start_time))

    if filters:
        query = filters.apply(query, entity_ids)
    if end_time is not None:
        query = query.filter(states.last_updated < end_time)

    ordered = recorder.execute(
        query.order_by(states.entity_id, states.last_updated))
    # Drop rows that are not significant or are marked hidden.
    visible = (
        state for state in ordered
        if _is_significant(state) and
        not state.attributes.get(ATTR_HIDDEN, False))

    return states_to_json(visible, start_time, entity_id, filters)
def _add_test_events(self):
    """Add a few events for testing."""
    now = datetime.now()
    five_days_ago = now - timedelta(days=5)
    event_data = {'test_attr': 5, 'test_attr_10': 'nice'}

    self.hass.block_till_done()
    recorder._INSTANCE.block_till_done()

    with recorder.session_scope() as session:
        events_model = recorder.get_model('Events')
        for event_id in range(5):
            # The first two events are old enough to be purged.
            is_old = event_id < 2
            session.add(events_model(
                event_type='EVENT_TEST_PURGE' if is_old else 'EVENT_TEST',
                event_data=json.dumps(event_data),
                origin='LOCAL',
                created=five_days_ago if is_old else now,
                time_fired=five_days_ago if is_old else now,
            ))
def get_significant_states(start_time, end_time=None, entity_id=None):
    """Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    states = recorder.get_model('States')
    criteria = (
        (states.domain.in_(SIGNIFICANT_DOMAINS) |
         (states.last_changed == states.last_updated)) &
        ((~states.domain.in_(IGNORE_DOMAINS)) &
         (states.last_updated > start_time)))
    query = recorder.query('States').filter(criteria)

    if end_time is not None:
        query = query.filter(states.last_updated < end_time)
    if entity_id is not None:
        query = query.filter_by(entity_id=entity_id.lower())

    rows = recorder.execute(
        query.order_by(states.entity_id, states.last_updated))
    return states_to_json(
        (row for row in rows if _is_significant(row)),
        start_time, entity_id)
def get_results():
    """Query DB for results."""
    events = recorder.get_model('Events')
    # Events strictly inside the (start_day, end_day) window.
    in_window = ((events.time_fired > start_day) &
                 (events.time_fired < end_day))
    query = recorder.query('Events').filter(in_window)
    return _exclude_events(recorder.execute(query), self.config)
def _add_test_states(self):
    """Add multiple states to the db for testing."""
    now = datetime.now()
    five_days_ago = now - timedelta(days=5)
    attributes = {'test_attr': 5, 'test_attr_10': 'nice'}

    self.hass.block_till_done()
    recorder._INSTANCE.block_till_done()

    states_model = recorder.get_model('States')
    for event_id in range(5):
        # The first three states are old enough to be purged.
        is_old = event_id < 3
        self.session.add(states_model(
            entity_id='test.recorder2',
            domain='sensor',
            state='purgeme' if is_old else 'dontpurgeme',
            attributes=json.dumps(attributes),
            last_changed=five_days_ago if is_old else now,
            last_updated=five_days_ago if is_old else now,
            created=five_days_ago if is_old else now,
            event_id=event_id + 1000,
        ))
    self.session.commit()
def get_significant_states(start_time, end_time=None, entity_id=None):
    """Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    states = recorder.get_model("States")
    is_significant_domain = states.domain.in_(SIGNIFICANT_DOMAINS)
    is_state_change = states.last_changed == states.last_updated
    not_ignored = ~states.domain.in_(IGNORE_DOMAINS)

    query = recorder.query("States").filter(
        (is_significant_domain | is_state_change) &
        (not_ignored & (states.last_updated > start_time)))
    if end_time is not None:
        query = query.filter(states.last_updated < end_time)
    if entity_id is not None:
        query = query.filter_by(entity_id=entity_id.lower())

    ordered = query.order_by(states.entity_id, states.last_updated)
    filtered = (
        state for state in recorder.execute(ordered)
        if _is_significant(state))
    return states_to_json(filtered, start_time, entity_id)
def update(self):
    """Get the latest data and update the states."""
    data = self.speedtest_client.data
    if data is not None:
        # Fresh measurement available: pick the value for this sensor type.
        if self.type in ('ping', 'download', 'upload'):
            self._state = data[self.type]
        return

    # No fresh data: restore the most recent recorded state, skipping
    # 'unknown' values, newest first.
    entity_id = 'sensor.speedtest_' + self._name.lower()
    states = recorder.get_model('States')
    try:
        last_state = recorder.execute(
            recorder.query('States').filter(
                (states.entity_id == entity_id) &
                (states.last_changed == states.last_updated) &
                (states.state != 'unknown')
            ).order_by(states.state_id.desc()).limit(1))
    except (TypeError, RuntimeError):
        # Recorder not ready / query failed: leave state untouched.
        return
    if last_state:
        self._state = last_state[0].state
def update(self):
    """Get the latest data and update the states."""
    data = self.speedtest_client.data
    if data is None:
        # Fall back to the last recorded non-'unknown' state.
        sensor_entity = 'sensor.speedtest_' + self._name.lower()
        states = recorder.get_model('States')
        try:
            previous = recorder.execute(
                recorder.query('States').filter(
                    (states.entity_id == sensor_entity) &
                    (states.last_changed == states.last_updated) &
                    (states.state != 'unknown')
                ).order_by(states.state_id.desc()).limit(1))
        except TypeError:
            return
        except RuntimeError:
            return
        if not previous:
            return
        self._state = previous[0].state
    elif self.type == 'ping':
        self._state = data['ping']
    elif self.type == 'download':
        self._state = data['download']
    elif self.type == 'upload':
        self._state = data['upload']
def get_states(utc_point_in_time, entity_ids=None, run=None):
    """Return the states at a specific point in time."""
    if run is None:
        run = recorder.run_information(utc_point_in_time)
        # History did not run before utc_point_in_time
        if run is None:
            return []

    from sqlalchemy import and_, func

    states = recorder.get_model('States')
    # Sub-query: per entity, the newest state id created during the run
    # but before the requested point in time.
    newest_ids = recorder.query(
        func.max(states.state_id).label('max_state_id')
    ).filter(
        (states.created >= run.start) &
        (states.created < utc_point_in_time))
    if entity_ids is not None:
        newest_ids = newest_ids.filter(states.entity_id.in_(entity_ids))
    newest_ids = newest_ids.group_by(states.entity_id).subquery()

    query = recorder.query('States').join(
        newest_ids,
        and_(states.state_id == newest_ids.c.max_state_id))
    return recorder.execute(query)
def _add_test_events(self):
    """Add a few events for testing."""
    now = datetime.now()
    five_days_ago = now - timedelta(days=5)
    event_data = {'test_attr': 5, 'test_attr_10': 'nice'}

    self.hass.block_till_done()
    recorder._INSTANCE.block_till_done()

    with recorder.session_scope() as session:
        for idx in range(5):
            if idx < 2:
                # Old enough to be purged.
                when, kind = five_days_ago, 'EVENT_TEST_PURGE'
            else:
                when, kind = now, 'EVENT_TEST'
            session.add(recorder.get_model('Events')(
                event_type=kind,
                event_data=json.dumps(event_data),
                origin='LOCAL',
                created=when,
                time_fired=when,
            ))
def get_states(utc_point_in_time, entity_ids=None, run=None, filters=None):
    """Return the states at a specific point in time."""
    if run is None:
        run = recorder.run_information(utc_point_in_time)
        # History did not run before utc_point_in_time
        if run is None:
            return []

    from sqlalchemy import and_, func

    states = recorder.get_model('States')
    # Per entity: newest state id within the run, before the cut-off,
    # excluding always-ignored domains.
    most_recent = recorder.query(
        func.max(states.state_id).label('max_state_id')
    ).filter(
        (states.created >= run.start) &
        (states.created < utc_point_in_time) &
        (~states.domain.in_(IGNORE_DOMAINS)))
    if filters:
        most_recent = filters.apply(most_recent, entity_ids)
    most_recent = most_recent.group_by(states.entity_id).subquery()

    query = recorder.query('States').join(
        most_recent,
        and_(states.state_id == most_recent.c.max_state_id))

    # Yield lazily, skipping states flagged as hidden.
    for state in recorder.execute(query):
        if not state.attributes.get(ATTR_HIDDEN, False):
            yield state
def _add_test_states(self):
    """Add multiple states to the db for testing."""
    now = datetime.now()
    five_days_ago = now - timedelta(days=5)
    attributes = {'test_attr': 5, 'test_attr_10': 'nice'}

    self.hass.block_till_done()
    recorder._INSTANCE.block_till_done()

    for event_id in range(5):
        if event_id < 3:
            # Old enough to be purged.
            timestamp, state = five_days_ago, 'purgeme'
        else:
            timestamp, state = now, 'dontpurgeme'
        self.session.add(recorder.get_model('States')(
            entity_id='test.recorder2',
            domain='sensor',
            state=state,
            attributes=json.dumps(attributes),
            last_changed=timestamp,
            last_updated=timestamp,
            created=timestamp,
            event_id=event_id + 1000,
        ))
    self.session.commit()
def get_states(utc_point_in_time, entity_ids=None, run=None):
    """Return the states at a specific point in time."""
    if run is None:
        run = recorder.run_information(utc_point_in_time)
        # History did not run before utc_point_in_time
        if run is None:
            return []

    from sqlalchemy import and_, func

    states = recorder.get_model("States")
    in_run = ((states.created >= run.start) &
              (states.created < utc_point_in_time))
    latest_ids = recorder.query(
        func.max(states.state_id).label("max_state_id")).filter(in_run)
    if entity_ids is not None:
        latest_ids = latest_ids.filter(states.entity_id.in_(entity_ids))
    latest_ids = latest_ids.group_by(states.entity_id).subquery()

    # Join back to the full rows for each per-entity newest id.
    full_rows = recorder.query("States").join(
        latest_ids,
        and_(states.state_id == latest_ids.c.max_state_id))
    return recorder.execute(full_rows)
def get_results():
    """Query DB for results."""
    events = recorder.get_model('Events')
    query = recorder.query('Events').filter(
        (events.time_fired > start_day) &
        (events.time_fired < end_day))
    fetched = recorder.execute(query)
    # Apply the configured include/exclude rules before returning.
    return _exclude_events(fetched, self.config)
def last_5_states(entity_id):
    """Return the last 5 states for entity_id."""
    entity_id = entity_id.lower()
    states = recorder.get_model('States')
    # Only rows where the state actually changed, newest first.
    query = recorder.query('States').filter(
        (states.entity_id == entity_id) &
        (states.last_changed == states.last_updated))
    return recorder.execute(
        query.order_by(states.state_id.desc()).limit(5))
def last_5_states(entity_id):
    """Return the last 5 states for entity_id."""
    states = recorder.get_model('States')
    matches_entity = states.entity_id == entity_id.lower()
    state_changed = states.last_changed == states.last_updated
    return recorder.execute(
        recorder.query('States')
        .filter(matches_entity & state_changed)
        .order_by(states.state_id.desc())
        .limit(5))
def last_recorder_run():
    """Retrieve the last closed recorder run from the DB."""
    recorder.get_instance()
    rec_runs = recorder.get_model('RecorderRuns')
    with recorder.session_scope() as session:
        last_run = recorder.query(rec_runs).order_by(
            rec_runs.end.desc()).first()
        if last_run is None:
            return None
        # Detach the row so it stays usable after the session closes.
        session.expunge(last_run)
        return last_run
def get(self, request, datetime=None):
    """Retrieve logbook entries."""
    # NOTE: the `datetime` parameter shadows the stdlib module name;
    # kept as-is because it is part of the public view signature.
    start_day = dt_util.as_utc(datetime or dt_util.start_of_local_day())
    end_day = start_day + timedelta(days=1)

    events = recorder.get_model('Events')
    query = recorder.query('Events').filter(
        (events.time_fired > start_day) &
        (events.time_fired < end_day))
    return self.json(humanify(recorder.execute(query)))
def test_purge_old_events(self):
    """Test deleting old events."""
    self._add_test_events()

    events_model = recorder.get_model('Events')
    test_events = recorder.query('Events').filter(
        events_model.event_type.like("EVENT_TEST%"))
    self.assertEqual(test_events.count(), 5)

    # Purge everything older than four days; the two five-day-old
    # test events should be deleted.
    recorder._INSTANCE.purge_days = 4
    recorder._INSTANCE._purge_old_data()

    self.assertEqual(test_events.count(), 3)
def state_changes_during_period(start_time, end_time=None, entity_id=None):
    """Return states changes during UTC period start_time - end_time."""
    states = recorder.get_model("States")
    changed_in_period = (
        (states.last_changed == states.last_updated) &
        (states.last_changed > start_time))
    query = recorder.query("States").filter(changed_in_period)

    if end_time is not None:
        query = query.filter(states.last_updated < end_time)
    if entity_id is not None:
        query = query.filter_by(entity_id=entity_id.lower())

    rows = recorder.execute(
        query.order_by(states.entity_id, states.last_updated))
    return states_to_json(rows, start_time, entity_id)
def apply(self, query, entity_ids=None):
    """Apply the include/exclude filter on domains and entities on query.

    Following rules apply:
    * only the include section is configured - just query the specified
      entities or domains.
    * only the exclude section is configured - filter the specified
      entities and domains from all the entities in the system.
    * if include and exclude is defined - select the entities specified
      in the include and filter out the ones from the exclude list.
    """
    states = recorder.get_model('States')
    # specific entities requested - do not in/exclude anything
    if entity_ids is not None:
        return query.filter(states.entity_id.in_(entity_ids))
    # Always-ignored domains are filtered out unconditionally.
    query = query.filter(~states.domain.in_(IGNORE_DOMAINS))

    filter_query = None
    # filter if only excluded domain is configured
    if self.excluded_domains and not self.included_domains:
        filter_query = ~states.domain.in_(self.excluded_domains)
        if self.included_entities:
            filter_query &= states.entity_id.in_(self.included_entities)
    # filter if only included domain is configured
    elif not self.excluded_domains and self.included_domains:
        filter_query = states.domain.in_(self.included_domains)
        if self.included_entities:
            # Included entities widen the included-domain selection.
            filter_query |= states.entity_id.in_(self.included_entities)
    # filter if included and excluded domain is configured
    elif self.excluded_domains and self.included_domains:
        filter_query = ~states.domain.in_(self.excluded_domains)
        if self.included_entities:
            filter_query &= (states.domain.in_(self.included_domains) |
                             states.entity_id.in_(self.included_entities))
        else:
            filter_query &= (states.domain.in_(self.included_domains) & ~
                             states.domain.in_(self.excluded_domains))
    # no domain filter just included entities
    elif not self.excluded_domains and not self.included_domains and \
            self.included_entities:
        filter_query = states.entity_id.in_(self.included_entities)
    if filter_query is not None:
        query = query.filter(filter_query)
    # finally apply excluded entities filter if configured
    if self.excluded_entities:
        query = query.filter(~states.entity_id.in_(self.excluded_entities))
    return query
def state_changes_during_period(start_time, end_time=None, entity_id=None):
    """Return states changes during UTC period start_time - end_time."""
    states = recorder.get_model('States')
    query = recorder.query('States').filter(
        (states.last_changed == states.last_updated) &
        (states.last_changed > start_time))

    if end_time is not None:
        query = query.filter(states.last_updated < end_time)
    if entity_id is not None:
        query = query.filter_by(entity_id=entity_id.lower())

    ordered = query.order_by(states.entity_id, states.last_updated)
    return states_to_json(recorder.execute(ordered), start_time, entity_id)
def test_purge_disabled(self):
    """Test leaving purge_days disabled."""
    self._add_test_states()
    self._add_test_events()

    # make sure we start with 5 states and events
    all_states = recorder.query('States')
    test_events = recorder.query('Events').filter(
        recorder.get_model('Events').event_type.like("EVENT_TEST%"))
    self.assertEqual(all_states.count(), 5)
    self.assertEqual(test_events.count(), 5)

    # With purge_days unset, purging must be a no-op.
    recorder._INSTANCE.purge_days = None
    recorder._INSTANCE._purge_old_data()

    self.assertEqual(all_states.count(), 5)
    self.assertEqual(test_events.count(), 5)
def test_recorder_errors_exceptions(hass_recorder):
    # pylint: disable=redefined-outer-name
    """Test session_scope and get_model errors."""
    # Model cannot be resolved
    assert recorder.get_model('dont-exist') is None

    # Verify the instance fails before setup
    with pytest.raises(RuntimeError):
        recorder._verify_instance()

    # Setup the recorder
    hass_recorder()
    recorder._verify_instance()

    # Verify session scope raises (and prints) an exception
    with patch('homeassistant.components.recorder._LOGGER.error') \
            as log_mock, pytest.raises(Exception) as err:
        with recorder.session_scope() as session:
            session.execute('select * from notthere')
    assert log_mock.call_count == 1
    assert recorder.ERROR_QUERY[:-4] in log_mock.call_args[0][0]
    assert 'no such table' in str(err.value)
def test_recorder_errors_exceptions(hass_recorder):
    # pylint: disable=redefined-outer-name
    """Test session_scope and get_model errors."""
    # Unknown model names resolve to None rather than raising.
    assert recorder.get_model('dont-exist') is None

    # Before setup the instance accessor must raise.
    with pytest.raises(RuntimeError):
        recorder.get_instance()

    # After setup it must succeed.
    hass_recorder()
    recorder.get_instance()

    # A failing query inside session_scope is logged and re-raised.
    with patch('homeassistant.components.recorder._LOGGER.error') \
            as e_mock, pytest.raises(Exception) as err:
        with recorder.session_scope() as session:
            session.execute('select * from notthere')
    assert e_mock.call_count == 1
    assert recorder.ERROR_QUERY[:-4] in e_mock.call_args[0][0]
    assert 'no such table' in str(err.value)
def _restore_states(service):
    """Restore states.

    Re-applies the most recent recorded state of each entity via the
    matching domain service call.  Automations are restored last.

    NOTE(review): the `service` parameter is immediately shadowed by the
    per-state service name computed in the loop below — confirm the
    incoming argument is intentionally unused.
    """
    run = recorder.run_information(dt_util.utcnow())
    if run is None:
        return

    from sqlalchemy import and_, func
    states = recorder.get_model('States')
    # Sub-query: newest state id per entity.
    most_recent_state_ids = recorder.query(
        func.max(states.state_id).label('max_state_id'))
    most_recent_state_ids = most_recent_state_ids.group_by(
        states.entity_id).subquery()
    query = recorder.query('States').join(
        most_recent_state_ids,
        and_(states.state_id == most_recent_state_ids.c.max_state_id))
    states = recorder.execute(query)

    # Turn all automations off before replaying states.
    data = {ATTR_ENTITY_ID: 'group.all_automations'}
    hass.services.call('homeassistant', SERVICE_TURN_OFF, data, True)

    last_services = []
    for state in states:
        if state.domain == group.DOMAIN:
            continue
        # Map each domain to the service + payload that restores it.
        if state.domain == input_number.DOMAIN:
            data = {
                ATTR_ENTITY_ID: state.entity_id,
                input_number.ATTR_VALUE: state.state
            }
            service = input_number.SERVICE_SELECT_VALUE
        elif state.domain == input_select.DOMAIN:
            data = {
                ATTR_ENTITY_ID: state.entity_id,
                input_select.ATTR_OPTION: state.state
            }
            service = input_select.SERVICE_SELECT_OPTION
        elif state.domain == climate.DOMAIN:
            data = {
                ATTR_ENTITY_ID: state.entity_id,
                climate.ATTR_TEMPERATURE:
                    state.attributes.get('temperature')
            }
            service = climate.SERVICE_SET_TEMPERATURE
        elif (state.domain in [input_boolean.DOMAIN, automation.DOMAIN]):
            # or state.attributes.get('assumed_state')):
            data = {ATTR_ENTITY_ID: state.entity_id}
            if state.state == STATE_OFF:
                service = SERVICE_TURN_OFF
            if state.state == STATE_ON:
                service = SERVICE_TURN_ON
        else:
            continue
        if state.domain == light.DOMAIN:
            continue
        if state.domain == automation.DOMAIN:
            # Automations are deferred and restored after everything else.
            last_services.append((state.domain, service, data))
            continue
        elif (state.domain in [switch.DOMAIN]):
            continue
        else:
            # NOTE(review): this `else: continue` makes the direct
            # hass.services.call below unreachable for every domain —
            # as reconstructed, only deferred automations are ever
            # restored.  Confirm the original indentation / intent.
            continue
        if hass.states.get(state.entity_id) is None:
            continue
        hass.services.call(state.domain, service, data, True)

    # Replay the deferred (automation) restores last.
    for (domain, service, data) in last_services:
        hass.services.call(domain, service, data, True)