Example #1
    def _fetch_states_from_database(self) -> list[State]:
        """Fetch the states from the database."""
        _LOGGER.debug("%s: initializing values from the database", self.entity_id)
        states = []

        with session_scope(hass=self.hass) as session:
            query = session.query(States, StateAttributes).filter(
                States.entity_id == self._source_entity_id.lower()
            )

            if self._samples_max_age is not None:
                records_older_then = dt_util.utcnow() - self._samples_max_age
                _LOGGER.debug(
                    "%s: retrieve records not older then %s",
                    self.entity_id,
                    records_older_then,
                )
                query = query.filter(States.last_updated >= records_older_then)
            else:
                _LOGGER.debug("%s: retrieving all records", self.entity_id)

            query = query.outerjoin(
                StateAttributes, States.attributes_id == StateAttributes.attributes_id
            )
            query = query.order_by(States.last_updated.desc()).limit(
                self._samples_max_buffer_size
            )
            if results := execute(query, to_native=False, validate_entity_ids=False):
                for state, attributes in results:
                    native = state.to_native()
                    if not native.attributes:
                        native.attributes = attributes.to_native()
                    states.append(native)
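
Example #1 assembles its query step by step: filter on the source entity, optionally bound the age of the records, outer-join the attributes table, and cap the result at the buffer size. Below is a minimal, self-contained sketch of the same "newest N rows within a max age" pattern, assuming SQLAlchemy 1.4+ and an in-memory SQLite database; the State model and entity id are stand-ins, not the real recorder schema.

from datetime import datetime, timedelta

from sqlalchemy import Column, DateTime, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class State(Base):
    __tablename__ = "states"
    state_id = Column(Integer, primary_key=True)
    entity_id = Column(String)
    state = Column(String)
    last_updated = Column(DateTime)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    cutoff = datetime.utcnow() - timedelta(hours=24)
    newest = (
        session.query(State)
        .filter(State.entity_id == "sensor.demo", State.last_updated >= cutoff)
        .order_by(State.last_updated.desc())
        .limit(20)
        .all()
    )
    # `newest` holds at most 20 rows, newest first, none older than `cutoff`.
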
Example #2
def _get_significant_states(
    hass,
    session,
    start_time,
    end_time=None,
    entity_ids=None,
    filters=None,
    include_start_time_state=True,
    significant_changes_only=True,
    minimal_response=False,
):
    """
    Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    timer_start = time.perf_counter()

    baked_query = hass.data[HISTORY_BAKERY](
        lambda session: session.query(*QUERY_STATES))

    if significant_changes_only:
        baked_query += lambda q: q.filter(
            (States.domain.in_(SIGNIFICANT_DOMAINS)
             | (States.last_changed == States.last_updated))
            & (States.last_updated > bindparam("start_time")))
    else:
        baked_query += lambda q: q.filter(States.last_updated > bindparam(
            "start_time"))

    if filters:
        filters.bake(baked_query, entity_ids)

    if end_time is not None:
        baked_query += lambda q: q.filter(States.last_updated < bindparam(
            "end_time"))

    baked_query += lambda q: q.order_by(States.entity_id, States.last_updated)

    states = execute(
        baked_query(session).params(start_time=start_time,
                                    end_time=end_time,
                                    entity_ids=entity_ids))

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug("get_significant_states took %fs", elapsed)

    return _sorted_states_to_json(
        hass,
        session,
        states,
        start_time,
        entity_ids,
        filters,
        include_start_time_state,
        minimal_response,
    )
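
The HISTORY_BAKERY pattern above builds the query once, caches the compiled SQL, and binds start_time / end_time / entity_ids at execution time. A rough sketch of that baked-query style, assuming SQLAlchemy 1.3's sqlalchemy.ext.baked extension (deprecated in 1.4) and a toy Item model; none of the names below come from Home Assistant.

from sqlalchemy import Column, Integer, String, bindparam, create_engine
from sqlalchemy.ext import baked
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class Item(Base):
    __tablename__ = "items"
    id = Column(Integer, primary_key=True)
    name = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

bakery = baked.bakery()                        # shared cache of compiled queries
baked_query = bakery(lambda s: s.query(Item))  # base query, defined once
baked_query += lambda q: q.filter(Item.name == bindparam("name"))
baked_query += lambda q: q.order_by(Item.id)

# Only the parameters change between executions; the SQL string is reused.
rows = baked_query(session).params(name="demo").all()
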
Example #3
def get_states(hass,
               utc_point_in_time,
               entity_ids=None,
               run=None,
               filters=None):
    """Return the states at a specific point in time."""
    from homeassistant.components.recorder.models import States

    if run is None:
        run = recorder.run_information(hass, utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    from sqlalchemy import and_, func

    with session_scope(hass=hass) as session:
        if entity_ids and len(entity_ids) == 1:
            # Use an entirely different (and extremely fast) query if we only
            # have a single entity id
            most_recent_state_ids = session.query(
                States.state_id.label('max_state_id')).filter(
                    (States.created < utc_point_in_time)
                    & (States.entity_id.in_(entity_ids))).order_by(
                        States.created.desc())

            if filters:
                most_recent_state_ids = filters.apply(most_recent_state_ids,
                                                      entity_ids)

            most_recent_state_ids = most_recent_state_ids.limit(1)

        else:
            # We have more than one entity to look at (most commonly we want
            # all entities,) so we need to do a search on all states since the
            # last recorder run started.
            most_recent_state_ids = session.query(
                func.max(States.state_id).label('max_state_id')).filter(
                    (States.created >= run.start)
                    & (States.created < utc_point_in_time)
                    & (~States.domain.in_(IGNORE_DOMAINS)))

            if filters:
                most_recent_state_ids = filters.apply(most_recent_state_ids,
                                                      entity_ids)

            most_recent_state_ids = most_recent_state_ids.group_by(
                States.entity_id)

        most_recent_state_ids = most_recent_state_ids.subquery()

        query = session.query(States).join(
            most_recent_state_ids,
            and_(States.state_id == most_recent_state_ids.c.max_state_id))

        return [
            state for state in execute(query)
            if not state.attributes.get(ATTR_HIDDEN, False)
        ]
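
The multi-entity branch above is a classic "latest row per group" query: an inner query picks max(state_id) per entity_id, and the outer query joins back to fetch the full rows. A self-contained sketch of that technique with a stand-in model (SQLAlchemy 1.4+ assumed):

from sqlalchemy import Column, Integer, String, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class State(Base):
    __tablename__ = "states"
    state_id = Column(Integer, primary_key=True)
    entity_id = Column(String)
    state = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([
        State(entity_id="sensor.a", state="1"),
        State(entity_id="sensor.a", state="2"),
        State(entity_id="sensor.b", state="5"),
    ])
    session.flush()

    latest_ids = (
        session.query(func.max(State.state_id).label("max_state_id"))
        .group_by(State.entity_id)
        .subquery()
    )
    latest = (
        session.query(State)
        .join(latest_ids, State.state_id == latest_ids.c.max_state_id)
        .all()
    )
    # One row per entity: state "2" for sensor.a and "5" for sensor.b.
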
Example #4
    async def _load_history_from_db(self):
        """Load the history of the brightness values from the database.

        This only needs to be done once during startup.
        """
        from homeassistant.components.recorder.models import States

        start_date = datetime.now() - timedelta(days=self._conf_check_days)
        entity_id = self._readingmap.get(READING_BRIGHTNESS)
        if entity_id is None:
            _LOGGER.debug("Not reading the history from the database as "
                          "there is no brightness sensor configured")
            return

        _LOGGER.debug("Initializing values for %s from the database",
                      self._name)
        with session_scope(hass=self.hass) as session:
            # Combine the conditions with & so both reach the SQL; Python's
            # `and` silently drops one of them.
            query = (session.query(States).filter(
                (States.entity_id == entity_id.lower())
                & (States.last_updated > start_date)).order_by(
                    States.last_updated.asc()))
            states = execute(query)

            for state in states:
                # filter out all None, NaN and "unknown" states
                # only keep real values
                try:
                    self._brightness_history.add_measurement(
                        int(state.state), state.last_updated)
                except ValueError:
                    pass
        _LOGGER.debug("Initializing from database completed")
        self.async_schedule_update_ha_state()
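
Every snippet here opens the recorder database through `with session_scope(hass=...) as session:`, a commit-or-rollback context manager around a SQLAlchemy session. The sketch below shows the generic shape of such a helper; it is not the recorder's actual implementation, which obtains its session factory from the running recorder instance.

from contextlib import contextmanager


@contextmanager
def session_scope(*, session_factory):
    """Provide a transactional scope: commit on success, roll back on error."""
    session = session_factory()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
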
Example #5
def state_changes_during_period(hass,
                                start_time,
                                end_time=None,
                                entity_id=None):
    """Return states changes during UTC period start_time - end_time."""
    with session_scope(hass=hass) as session:
        baked_query = hass.data[HISTORY_BAKERY](
            lambda session: session.query(*QUERY_STATES))

        baked_query += lambda q: q.filter(
            (States.last_changed == States.last_updated)
            & (States.last_updated > bindparam("start_time")))

        if end_time is not None:
            baked_query += lambda q: q.filter(States.last_updated < bindparam(
                "end_time"))

        if entity_id is not None:
            baked_query += lambda q: q.filter_by(entity_id=bindparam(
                "entity_id"))
            entity_id = entity_id.lower()

        baked_query += lambda q: q.order_by(States.entity_id, States.
                                            last_updated)

        states = execute(
            baked_query(session).params(start_time=start_time,
                                        end_time=end_time,
                                        entity_id=entity_id))

        entity_ids = [entity_id] if entity_id is not None else None

        return _sorted_states_to_json(hass, session, states, start_time,
                                      entity_ids)
Example #6
def get_significant_states(hass,
                           start_time,
                           end_time=None,
                           entity_id=None,
                           filters=None):
    """
    Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    from homeassistant.components.recorder.models import States

    entity_ids = (entity_id.lower(), ) if entity_id is not None else None

    with session_scope(hass=hass) as session:
        query = session.query(
            States).filter((States.domain.in_(SIGNIFICANT_DOMAINS)
                            | (States.last_changed == States.last_updated))
                           & (States.last_updated > start_time))

        if filters:
            query = filters.apply(query, entity_ids)

        if end_time is not None:
            query = query.filter(States.last_updated < end_time)

        states = (state for state in execute(
            query.order_by(States.entity_id, States.last_updated))
                  if (_is_significant(state)
                      and not state.attributes.get(ATTR_HIDDEN, False)))

    return states_to_json(hass, states, start_time, entity_id, filters)
Example #7
def get_last_state_changes(hass, number_of_states, entity_id):
    """Return the last number_of_states."""
    start_time = dt_util.utcnow()

    with session_scope(hass=hass) as session:
        baked_query = hass.data[HISTORY_BAKERY](
            lambda session: session.query(*QUERY_STATES))
        baked_query += lambda q: q.filter(States.last_changed == States.
                                          last_updated)

        if entity_id is not None:
            baked_query += lambda q: q.filter_by(entity_id=bindparam(
                "entity_id"))
            entity_id = entity_id.lower()

        baked_query += lambda q: q.order_by(States.entity_id,
                                            States.last_updated.desc())

        baked_query += lambda q: q.limit(bindparam("number_of_states"))

        states = execute(
            baked_query(session).params(number_of_states=number_of_states,
                                        entity_id=entity_id))

        entity_ids = [entity_id] if entity_id is not None else None

        return _sorted_states_to_json(
            hass,
            session,
            reversed(states),
            start_time,
            entity_ids,
            include_start_time_state=False,
        )
Example #8
    async def _load_history_from_db(self):
        """Load the history of the brightness values from the database.

        This only needs to be done once during startup.
        """
        from homeassistant.components.recorder.models import States
        start_date = datetime.now() - timedelta(days=self._conf_check_days)
        entity_id = self._readingmap.get(READING_BRIGHTNESS)
        if entity_id is None:
            _LOGGER.debug("not reading the history from the database as "
                          "there is no brightness sensor configured.")
            return

        _LOGGER.debug("initializing values for %s from the database",
                      self._name)
        with session_scope(hass=self.hass) as session:
            # Combine the conditions with & so both reach the SQL; Python's
            # `and` silently drops one of them.
            query = session.query(States).filter(
                (States.entity_id == entity_id.lower()) &
                (States.last_updated > start_date)
            ).order_by(States.last_updated.asc())
            states = execute(query)

            for state in states:
                # filter out all None, NaN and "unknown" states
                # only keep real values
                try:
                    self._brightness_history.add_measurement(
                        int(state.state), state.last_updated)
                except ValueError:
                    pass
        _LOGGER.debug("initializing from database completed")
        self.async_schedule_update_ha_state()
Example #9
def _get_events(hass, config, start_day, end_day, entity_id=None):
    """Get events for a period of time."""
    from homeassistant.components.recorder.models import Events, States
    from homeassistant.components.recorder.util import (
        execute, session_scope)

    entities_filter = _generate_filter_from_config(config)

    with session_scope(hass=hass) as session:
        if entity_id is not None:
            entity_ids = [entity_id.lower()]
        else:
            entity_ids = _get_related_entity_ids(session, entities_filter)

        query = session.query(Events).order_by(Events.time_fired) \
            .outerjoin(States, (Events.event_id == States.event_id)) \
            .filter(Events.event_type.in_(ALL_EVENT_TYPES)) \
            .filter((Events.time_fired > start_day)
                    & (Events.time_fired < end_day)) \
            .filter(((States.last_updated == States.last_changed) &
                     States.entity_id.in_(entity_ids))
                    | (States.state_id.is_(None)))

        events = execute(query)

    return humanify(hass, _exclude_events(events, entities_filter))
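
The logbook query above LEFT OUTER JOINs events to states so that events with no matching state row survive the state filters (their state_id is NULL). A reduced, self-contained sketch of that join shape with stand-in models; the real query adds the time-window and event-type filters shown above.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Event(Base):
    __tablename__ = "events"
    event_id = Column(Integer, primary_key=True)
    event_type = Column(String)


class State(Base):
    __tablename__ = "states"
    state_id = Column(Integer, primary_key=True)
    event_id = Column(Integer)
    entity_id = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    query = (
        session.query(Event)
        .outerjoin(State, Event.event_id == State.event_id)
        .filter(
            State.entity_id.in_(["light.kitchen"])  # keep matching state rows...
            | State.state_id.is_(None)              # ...and events with no state at all
        )
    )
    events = query.all()
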
Example #11
def get_significant_states(hass, start_time, end_time=None, entity_id=None,
                           filters=None):
    """
    Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    from homeassistant.components.recorder.models import States

    entity_ids = (entity_id.lower(), ) if entity_id is not None else None

    with session_scope(hass=hass) as session:
        query = session.query(States).filter(
            (States.domain.in_(SIGNIFICANT_DOMAINS) |
             (States.last_changed == States.last_updated)) &
            (States.last_updated > start_time))

        if filters:
            query = filters.apply(query, entity_ids)

        if end_time is not None:
            query = query.filter(States.last_updated < end_time)

        states = (
            state for state in execute(
                query.order_by(States.entity_id, States.last_updated))
            if (_is_significant(state) and
                not state.attributes.get(ATTR_HIDDEN, False)))

    return states_to_json(hass, states, start_time, entity_id, filters)
Example #12
def get_states(hass, utc_point_in_time, entity_ids=None, run=None,
               filters=None):
    """Return the states at a specific point in time."""
    from homeassistant.components.recorder.models import States

    if run is None:
        run = recorder.run_information(hass, utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    from sqlalchemy import and_, func

    with session_scope(hass=hass) as session:
        most_recent_state_ids = session.query(
            func.max(States.state_id).label('max_state_id')
        ).filter(
            (States.created >= run.start) &
            (States.created < utc_point_in_time) &
            (~States.domain.in_(IGNORE_DOMAINS)))

        if filters:
            most_recent_state_ids = filters.apply(most_recent_state_ids,
                                                  entity_ids)

        most_recent_state_ids = most_recent_state_ids.group_by(
            States.entity_id).subquery()

        query = session.query(States).join(most_recent_state_ids, and_(
            States.state_id == most_recent_state_ids.c.max_state_id))

        return [state for state in execute(query)
                if not state.attributes.get(ATTR_HIDDEN, False)]
Example #13
def get_last_state_changes(hass, number_of_states, entity_id):
    """Return the last number_of_states."""
    start_time = dt_util.utcnow()

    with session_scope(hass=hass) as session:
        query = session.query(*QUERY_STATES).filter(
            States.last_changed == States.last_updated
        )

        if entity_id is not None:
            query = query.filter_by(entity_id=entity_id.lower())

        entity_ids = [entity_id] if entity_id is not None else None

        states = execute(
            query.order_by(States.entity_id, States.last_updated.desc()).limit(
                number_of_states
            )
        )

        return _sorted_states_to_json(
            hass,
            session,
            reversed(states),
            start_time,
            entity_ids,
            include_start_time_state=False,
        )
def test_recorder_bad_execute(hass_recorder):
    """Bad execute, retry 3 times."""
    from sqlalchemy.exc import SQLAlchemyError
    hass_recorder()

    def to_native():
        """Rasie exception."""
        raise SQLAlchemyError()

    mck1 = MagicMock()
    mck1.to_native = to_native

    with pytest.raises(SQLAlchemyError), \
            patch('homeassistant.components.recorder.time.sleep') as e_mock:
        util.execute((mck1,))

    assert e_mock.call_count == 2
def test_recorder_bad_execute(hass_recorder):
    """Bad execute, retry 3 times."""
    from sqlalchemy.exc import SQLAlchemyError

    hass_recorder()

    def to_native(validate_entity_id=True):
        """Raise exception."""
        raise SQLAlchemyError()

    mck1 = MagicMock()
    mck1.to_native = to_native

    with pytest.raises(SQLAlchemyError), patch(
            "homeassistant.components.recorder.core.time.sleep") as e_mock:
        util.execute((mck1, ), to_native=True)

    assert e_mock.call_count == 2
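
Both tests above pin down the retry behaviour of the recorder's execute() helper: a query that keeps raising SQLAlchemyError is retried, with a sleep between attempts, and the error is re-raised on the final try (hence two sleep calls for three attempts). A rough standalone sketch of that shape, with made-up constants, not the recorder's actual code:

import time

from sqlalchemy.exc import SQLAlchemyError

RETRIES = 3
QUERY_RETRY_WAIT = 0.1


def execute_with_retries(qry):
    """Run a query iterable, converting rows with to_native(), retrying on DB errors."""
    for attempt in range(RETRIES):
        try:
            return [row.to_native() for row in qry]
        except SQLAlchemyError:
            if attempt == RETRIES - 1:
                raise                        # last attempt: propagate the error
            time.sleep(QUERY_RETRY_WAIT)     # two sleeps across three attempts
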
Example #17
def _get_events(hass, start_day, end_day):
    """Get events for a period of time."""
    from homeassistant.components.recorder.models import Events
    from homeassistant.components.recorder.util import (execute, session_scope)

    with session_scope(hass=hass) as session:
        query = session.query(Events).order_by(
            Events.time_fired).filter((Events.time_fired > start_day)
                                      & (Events.time_fired < end_day))
        return execute(query)
Example #18
def _get_significant_states(
    hass,
    session,
    start_time,
    end_time=None,
    entity_ids=None,
    filters=None,
    include_start_time_state=True,
    significant_changes_only=True,
    minimal_response=False,
):
    """
    Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    timer_start = time.perf_counter()

    if significant_changes_only:
        query = session.query(*QUERY_STATES).filter(
            (
                States.domain.in_(SIGNIFICANT_DOMAINS)
                | (States.last_changed == States.last_updated)
            )
            & (States.last_updated > start_time)
        )
    else:
        query = session.query(*QUERY_STATES).filter(States.last_updated > start_time)

    if filters:
        query = filters.apply(query, entity_ids)

    if end_time is not None:
        query = query.filter(States.last_updated < end_time)

    query = query.order_by(States.entity_id, States.last_updated)

    states = execute(query)

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug("get_significant_states took %fs", elapsed)

    return _sorted_states_to_json(
        hass,
        session,
        states,
        start_time,
        entity_ids,
        filters,
        include_start_time_state,
        minimal_response,
    )
Example #19
def _get_events(hass, start_day, end_day):
    """Get events for a period of time."""
    from homeassistant.components.recorder.models import Events
    from homeassistant.components.recorder.util import (
        execute, session_scope)

    with session_scope(hass=hass) as session:
        query = session.query(Events).order_by(
            Events.time_fired).filter(
                (Events.time_fired > start_day) &
                (Events.time_fired < end_day))
        return execute(query)
Example #20
    async def _async_initialize_from_database(self):
        """Initialize the list of states from the database.
		The query will get the list of states in DESCENDING order so that we
		can limit the result to self._sample_size. Afterwards reverse the
		list so that we get it in the right order again.
		If MaxAge is provided then query will restrict to entries younger then
		current datetime - MaxAge.
		"""
        # limit range
        records_older_then = datetime.datetime.now(get_localzone()).replace(
            microsecond=0, second=0, minute=0, hour=0)
        #_LOGGER.error("DB time limit:")
        #_LOGGER.error(records_older_then)

        with session_scope(hass=self.hass) as session:

            # grab grid data
            query = session.query(States).filter(States.entity_id == self._net)
            query = query.filter(States.created >= records_older_then)
            states_net = execute(query)

            # grab solar data
            query = session.query(States).filter(States.entity_id == self._gen)
            query = query.filter(States.created >= records_older_then)
            states_gen = execute(query)

            # merge and sort by date
            states = states_net + states_gen
            #_LOGGER.error(states[0].last_updated)

            states.sort(key=lambda x: x.last_updated)

            #_LOGGER.error(str(len(states))+" entries found in db")
            session.expunge_all()

        for state in states:
            #all should be older based on the filter .. but we've seen strange behavior
            #if(state.last_updated > records_older_then):
            self.add_state(entity="", new_state=state)
Example #21
def _get_events(hass, config, start_day, end_day):
    """Get events for a period of time."""
    from homeassistant.components.recorder.models import Events, States
    from homeassistant.components.recorder.util import (execute, session_scope)

    with session_scope(hass=hass) as session:
        query = session.query(Events).order_by(Events.time_fired) \
            .outerjoin(States, (Events.event_id == States.event_id))  \
            .filter(Events.event_type.in_(ALL_EVENT_TYPES)) \
            .filter((Events.time_fired > start_day)
                    & (Events.time_fired < end_day)) \
            .filter((States.last_updated == States.last_changed)
                    | (States.state_id.is_(None)))
        events = execute(query)
    return humanify(_exclude_events(events, config))
Example #22
def get_significant_states(
    hass,
    start_time,
    end_time=None,
    entity_ids=None,
    filters=None,
    include_start_time_state=True,
):
    """
    Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    timer_start = time.perf_counter()
    from homeassistant.components.recorder.models import States

    with session_scope(hass=hass) as session:
        query = session.query(States).filter(
            (
                States.domain.in_(SIGNIFICANT_DOMAINS)
                | (States.last_changed == States.last_updated)
            )
            & (States.last_updated > start_time)
        )

        if filters:
            query = filters.apply(query, entity_ids)

        if end_time is not None:
            query = query.filter(States.last_updated < end_time)

        query = query.order_by(States.last_updated)

        states = (
            state
            for state in execute(query)
            if (_is_significant(state) and not state.attributes.get(ATTR_HIDDEN, False))
        )

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug("get_significant_states took %fs", elapsed)

    return states_to_json(
        hass, states, start_time, entity_ids, filters, include_start_time_state
    )
Example #23
def _get_events(hass, config, start_day, end_day):
    """Get events for a period of time."""
    from homeassistant.components.recorder.models import Events, States
    from homeassistant.components.recorder.util import (
        execute, session_scope)

    with session_scope(hass=hass) as session:
        query = session.query(Events).order_by(Events.time_fired) \
            .outerjoin(States, (Events.event_id == States.event_id))  \
            .filter(Events.event_type.in_(ALL_EVENT_TYPES)) \
            .filter((Events.time_fired > start_day)
                    & (Events.time_fired < end_day)) \
            .filter((States.last_updated == States.last_changed)
                    | (States.last_updated.is_(None)))
        events = execute(query)
    return humanify(_exclude_events(events, config))
Example #24
def _get_single_entity_states_with_session(hass, session, utc_point_in_time,
                                           entity_id):
    # Use an entirely different (and extremely fast) query if we only
    # have a single entity id
    baked_query = hass.data[HISTORY_BAKERY](
        lambda session: session.query(*QUERY_STATES))
    baked_query += lambda q: q.filter(
        States.last_updated < bindparam("utc_point_in_time"),
        States.entity_id == bindparam("entity_id"),
    )
    baked_query += lambda q: q.order_by(States.last_updated.desc())
    baked_query += lambda q: q.limit(1)

    query = baked_query(session).params(utc_point_in_time=utc_point_in_time,
                                        entity_id=entity_id)

    return [LazyState(row) for row in execute(query)]
Example #25
def state_changes_during_period(hass, start_time, end_time=None, entity_id=None):
    """Return states changes during UTC period start_time - end_time."""
    with session_scope(hass=hass) as session:
        query = session.query(*QUERY_STATES).filter(
            (States.last_changed == States.last_updated)
            & (States.last_updated > start_time)
        )

        if end_time is not None:
            query = query.filter(States.last_updated < end_time)

        if entity_id is not None:
            query = query.filter_by(entity_id=entity_id.lower())

        entity_ids = [entity_id] if entity_id is not None else None

        states = execute(query.order_by(States.entity_id, States.last_updated))

        return _sorted_states_to_json(hass, session, states, start_time, entity_ids)
Example #26
def state_changes_during_period(hass, start_time, end_time=None,
                                entity_id=None):
    """Return states changes during UTC period start_time - end_time."""
    from homeassistant.components.recorder.models import States

    with session_scope(hass=hass) as session:
        query = session.query(States).filter(
            (States.last_changed == States.last_updated) &
            (States.last_updated > start_time))

        if end_time is not None:
            query = query.filter(States.last_updated < end_time)

        if entity_id is not None:
            query = query.filter_by(entity_id=entity_id.lower())

        states = execute(
            query.order_by(States.last_updated))

    return states_to_json(hass, states, start_time, entity_id)
Example #27
    async def _initialize_from_database(self) -> None:
        """Initialize the list of states from the database.

        The query will get the list of states in DESCENDING order so that we
        can limit the result to self._sample_size. Afterwards reverse the
        list so that we get it in the right order again.

        If MaxAge is provided then the query is restricted to entries younger
        than current datetime - MaxAge.
        """

        _LOGGER.debug("%s: initializing values from the database", self.entity_id)

        with session_scope(hass=self.hass) as session:
            query = session.query(States).filter(
                States.entity_id == self._source_entity_id.lower()
            )

            if self._samples_max_age is not None:
                records_older_then = dt_util.utcnow() - self._samples_max_age
                _LOGGER.debug(
                    "%s: retrieve records not older then %s",
                    self.entity_id,
                    records_older_then,
                )
                query = query.filter(States.last_updated >= records_older_then)
            else:
                _LOGGER.debug("%s: retrieving all records", self.entity_id)

            query = query.order_by(States.last_updated.desc()).limit(
                self._samples_max_buffer_size
            )
            states = execute(query, to_native=True, validate_entity_ids=False)

        if states:
            for state in reversed(states):
                self._add_state_to_queue(state)

        self.async_schedule_update_ha_state(True)

        _LOGGER.debug("%s: initializing from database completed", self.entity_id)
Example #28
def state_changes_during_period(hass,
                                start_time,
                                end_time=None,
                                entity_id=None):
    """Return states changes during UTC period start_time - end_time."""
    from homeassistant.components.recorder.models import States

    with session_scope(hass=hass) as session:
        query = session.query(
            States).filter((States.last_changed == States.last_updated)
                           & (States.last_changed > start_time))

        if end_time is not None:
            query = query.filter(States.last_updated < end_time)

        if entity_id is not None:
            query = query.filter_by(entity_id=entity_id.lower())

        states = execute(query.order_by(States.entity_id, States.last_updated))

    return states_to_json(hass, states, start_time, entity_id)
    def _initialize_from_database(self):
        """Initialize the list of states from the database.

        The query will get the list of states in DESCENDING order so that we
        can limit the result to self._sample_size. Afterwards reverse the
        list so that we get it in the right order again.
        """
        from homeassistant.components.recorder.models import States
        _LOGGER.debug("initializing values for %s from the database",
                      self.entity_id)

        with session_scope(hass=self._hass) as session:
            query = session.query(States)\
                .filter(States.entity_id == self._entity_id.lower())\
                .order_by(States.last_updated.desc())\
                .limit(self._sampling_size)
            states = execute(query)

        for state in reversed(states):
            self._add_state_to_queue(state)

        _LOGGER.debug("initializing from database completed")
Example #31
def get_last_state_changes(hass, number_of_states, entity_id):
    """Return the last number_of_states."""
    from homeassistant.components.recorder.models import States

    start_time = dt_util.utcnow()

    with session_scope(hass=hass) as session:
        query = session.query(States).filter(
            (States.last_changed == States.last_updated))

        if entity_id is not None:
            query = query.filter_by(entity_id=entity_id.lower())

        entity_ids = [entity_id] if entity_id is not None else None

        states = execute(
            query.order_by(States.last_updated.desc()).limit(number_of_states))

    return states_to_json(hass, reversed(states),
                          start_time,
                          entity_ids,
                          include_start_time_state=False)
Example #32
def get_states(hass,
               utc_point_in_time,
               entity_ids=None,
               run=None,
               filters=None):
    """Return the states at a specific point in time."""
    from homeassistant.components.recorder.models import States

    if run is None:
        run = recorder.run_information(hass, utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    from sqlalchemy import and_, func

    with session_scope(hass=hass) as session:
        most_recent_state_ids = session.query(
            func.max(States.state_id).label(
                'max_state_id')).filter((States.created >= run.start)
                                        & (States.created < utc_point_in_time)
                                        & (~States.domain.in_(IGNORE_DOMAINS)))

        if filters:
            most_recent_state_ids = filters.apply(most_recent_state_ids,
                                                  entity_ids)

        most_recent_state_ids = most_recent_state_ids.group_by(
            States.entity_id).subquery()

        query = session.query(States).join(
            most_recent_state_ids,
            and_(States.state_id == most_recent_state_ids.c.max_state_id))

        return [
            state for state in execute(query)
            if not state.attributes.get(ATTR_HIDDEN, False)
        ]
def get_significant_states(hass, start_time, end_time=None, entity_ids=None,
                           filters=None, include_start_time_state=True):
    """
    Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    timer_start = time.perf_counter()
    from homeassistant.components.recorder.models import States

    with session_scope(hass=hass) as session:
        query = session.query(States).filter(
            (States.domain.in_(SIGNIFICANT_DOMAINS) |
             (States.last_changed == States.last_updated)) &
            (States.last_updated > start_time))

        if filters:
            query = filters.apply(query, entity_ids)

        if end_time is not None:
            query = query.filter(States.last_updated < end_time)

        query = query.order_by(States.last_updated)

        states = (
            state for state in execute(query)
            if (_is_significant(state) and
                not state.attributes.get(ATTR_HIDDEN, False)))

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug(
            'get_significant_states took %fs', elapsed)

    return states_to_json(
        hass, states, start_time, entity_ids, filters,
        include_start_time_state)
Example #35
    async def _async_initialize_from_database(self):
        """Initialize the list of states from the database.

        The query will get the list of states in DESCENDING order so that we
        can limit the result to self._sample_size. Afterwards reverse the
        list so that we get it in the right order again.

        If MaxAge is provided then the query is restricted to entries younger
        than current datetime - MaxAge.
        """
        from homeassistant.components.recorder.models import States
        _LOGGER.debug("%s: initializing values from the database",
                      self.entity_id)

        with session_scope(hass=self.hass) as session:
            query = session.query(States)\
                .filter(States.entity_id == self._entity_id.lower())

            if self._max_age is not None:
                records_older_then = dt_util.utcnow() - self._max_age
                _LOGGER.debug("%s: retrieve records not older then %s",
                              self.entity_id, records_older_then)
                query = query.filter(States.last_updated >= records_older_then)
            else:
                _LOGGER.debug("%s: retrieving all records.", self.entity_id)

            query = query\
                .order_by(States.last_updated.desc())\
                .limit(self._sampling_size)
            states = execute(query)

        for state in reversed(states):
            self._add_state_to_queue(state)

        self.async_schedule_update_ha_state(True)

        _LOGGER.debug("%s: initializing from database completed",
                      self.entity_id)
Example #36
def _get_states_with_session(hass,
                             session,
                             utc_point_in_time,
                             entity_ids=None,
                             run=None,
                             filters=None):
    """Return the states at a specific point in time."""
    if entity_ids and len(entity_ids) == 1:
        return _get_single_entity_states_with_session(hass, session,
                                                      utc_point_in_time,
                                                      entity_ids[0])

    if run is None:
        run = recorder.run_information_with_session(session, utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    # We have more than one entity to look at so we need to do a query on states
    # since the last recorder run started.
    query = session.query(*QUERY_STATES)

    if entity_ids:
        # We got an include-list of entities, accelerate the query by filtering already
        # in the inner query.
        most_recent_state_ids = (session.query(
            func.max(States.state_id).label("max_state_id"), ).filter(
                (States.last_updated >= run.start)
                & (States.last_updated < utc_point_in_time)).filter(
                    States.entity_id.in_(entity_ids)))
        most_recent_state_ids = most_recent_state_ids.group_by(
            States.entity_id)
        most_recent_state_ids = most_recent_state_ids.subquery()
        query = query.join(
            most_recent_state_ids,
            States.state_id == most_recent_state_ids.c.max_state_id,
        )
    else:
        # We did not get an include-list of entities, query all states in the inner
        # query, then filter out unwanted domains as well as applying the custom filter.
        # This filtering can't be done in the inner query because the domain column is
        # not indexed and we can't control what's in the custom filter.
        most_recent_states_by_date = (session.query(
            States.entity_id.label("max_entity_id"),
            func.max(States.last_updated).label("max_last_updated"),
        ).filter((States.last_updated >= run.start)
                 & (States.last_updated < utc_point_in_time)).group_by(
                     States.entity_id).subquery())
        most_recent_state_ids = (session.query(
            func.max(States.state_id).label("max_state_id")).join(
                most_recent_states_by_date,
                and_(
                    States.entity_id ==
                    most_recent_states_by_date.c.max_entity_id,
                    States.last_updated ==
                    most_recent_states_by_date.c.max_last_updated,
                ),
            ).group_by(States.entity_id).subquery())
        query = query.join(
            most_recent_state_ids,
            States.state_id == most_recent_state_ids.c.max_state_id,
        )
        query = query.filter(~States.domain.in_(IGNORE_DOMAINS))
        if filters:
            query = filters.apply(query)

    return [LazyState(row) for row in execute(query)]
Example #37
def _get_states_with_session(
    session, utc_point_in_time, entity_ids=None, run=None, filters=None
):
    """Return the states at a specific point in time."""
    query = session.query(*QUERY_STATES)

    if entity_ids and len(entity_ids) == 1:
        # Use an entirely different (and extremely fast) query if we only
        # have a single entity id
        query = (
            query.filter(
                States.last_updated < utc_point_in_time,
                States.entity_id.in_(entity_ids),
            )
            .order_by(States.last_updated.desc())
            .limit(1)
        )
        return [LazyState(row) for row in execute(query)]

    if run is None:
        run = recorder.run_information_with_session(session, utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    # We have more than one entity to look at (most commonly we want
    # all entities,) so we need to do a search on all states since the
    # last recorder run started.

    most_recent_states_by_date = session.query(
        States.entity_id.label("max_entity_id"),
        func.max(States.last_updated).label("max_last_updated"),
    ).filter(
        (States.last_updated >= run.start) & (States.last_updated < utc_point_in_time)
    )

    if entity_ids:
        # Query.filter() returns a new query; assign it back or the entity
        # filter is silently lost.
        most_recent_states_by_date = most_recent_states_by_date.filter(
            States.entity_id.in_(entity_ids)
        )

    most_recent_states_by_date = most_recent_states_by_date.group_by(States.entity_id)

    most_recent_states_by_date = most_recent_states_by_date.subquery()

    most_recent_state_ids = session.query(
        func.max(States.state_id).label("max_state_id")
    ).join(
        most_recent_states_by_date,
        and_(
            States.entity_id == most_recent_states_by_date.c.max_entity_id,
            States.last_updated == most_recent_states_by_date.c.max_last_updated,
        ),
    )

    most_recent_state_ids = most_recent_state_ids.group_by(States.entity_id)

    most_recent_state_ids = most_recent_state_ids.subquery()

    query = query.join(
        most_recent_state_ids, States.state_id == most_recent_state_ids.c.max_state_id,
    ).filter(~States.domain.in_(IGNORE_DOMAINS))

    if filters:
        query = filters.apply(query, entity_ids)

    return [LazyState(row) for row in execute(query)]
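
When recency is defined by last_updated rather than by state_id, Examples #36 and #37 use a two-step variant: first find max(last_updated) per entity, then resolve that back to a state_id, and finally join to pull the full rows. A self-contained sketch of that shape with a stand-in model (SQLAlchemy 1.4+ assumed):

from sqlalchemy import Column, DateTime, Integer, String, and_, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class State(Base):
    __tablename__ = "states"
    state_id = Column(Integer, primary_key=True)
    entity_id = Column(String)
    last_updated = Column(DateTime)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    newest_by_entity = (
        session.query(
            State.entity_id.label("max_entity_id"),
            func.max(State.last_updated).label("max_last_updated"),
        )
        .group_by(State.entity_id)
        .subquery()
    )
    newest_ids = (
        session.query(func.max(State.state_id).label("max_state_id"))
        .join(
            newest_by_entity,
            and_(
                State.entity_id == newest_by_entity.c.max_entity_id,
                State.last_updated == newest_by_entity.c.max_last_updated,
            ),
        )
        .group_by(State.entity_id)
        .subquery()
    )
    latest_rows = (
        session.query(State)
        .join(newest_ids, State.state_id == newest_ids.c.max_state_id)
        .all()
    )
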
Example #38
def get_significant_states_with_session(
    hass,
    session,
    start_time,
    end_time=None,
    entity_ids=None,
    filters=None,
    include_start_time_state=True,
    significant_changes_only=True,
    minimal_response=False,
):
    """
    Return states changes during UTC period start_time - end_time.

    entity_ids is an optional iterable of entities to include in the results.

    filters is an optional SQLAlchemy filter which will be applied to the database
    queries unless entity_ids is given, in which case it's ignored.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    timer_start = time.perf_counter()

    baked_query = hass.data[HISTORY_BAKERY](
        lambda session: session.query(*QUERY_STATES))

    if significant_changes_only:
        baked_query += lambda q: q.filter(
            (States.domain.in_(SIGNIFICANT_DOMAINS)
             | (States.last_changed == States.last_updated))
            & (States.last_updated > bindparam("start_time")))
    else:
        baked_query += lambda q: q.filter(States.last_updated > bindparam(
            "start_time"))

    if entity_ids is not None:
        baked_query += lambda q: q.filter(
            States.entity_id.in_(bindparam("entity_ids", expanding=True)))
    else:
        baked_query += lambda q: q.filter(~States.domain.in_(IGNORE_DOMAINS))
        if filters:
            filters.bake(baked_query)

    if end_time is not None:
        baked_query += lambda q: q.filter(States.last_updated < bindparam(
            "end_time"))

    baked_query += lambda q: q.order_by(States.entity_id, States.last_updated)

    states = execute(
        baked_query(session).params(start_time=start_time,
                                    end_time=end_time,
                                    entity_ids=entity_ids))

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug("get_significant_states took %fs", elapsed)

    return _sorted_states_to_dict(
        hass,
        session,
        states,
        start_time,
        entity_ids,
        filters,
        include_start_time_state,
        minimal_response,
    )
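
Example #38 restricts the baked query to the requested entities with bindparam("entity_ids", expanding=True), which lets one cached statement accept an IN list of any length at execution time. A small sketch of an expanding bind parameter in plain SQLAlchemy (1.4 style), with a stand-in table:

from sqlalchemy import bindparam, column, select, table

states = table("states", column("entity_id"), column("state"))

stmt = select(states).where(
    states.c.entity_id.in_(bindparam("entity_ids", expanding=True))
)
# The IN list is bound per execution, so one compiled statement serves any
# number of entity ids, e.g.:
#     conn.execute(stmt, {"entity_ids": ["sensor.a", "sensor.b"]})
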
def get_states(hass, utc_point_in_time, entity_ids=None, run=None,
               filters=None):
    """Return the states at a specific point in time."""
    from homeassistant.components.recorder.models import States

    if run is None:
        run = recorder.run_information(hass, utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    from sqlalchemy import and_, func

    with session_scope(hass=hass) as session:
        if entity_ids and len(entity_ids) == 1:
            # Use an entirely different (and extremely fast) query if we only
            # have a single entity id
            most_recent_state_ids = session.query(
                States.state_id.label('max_state_id')
            ).filter(
                (States.last_updated < utc_point_in_time) &
                (States.entity_id.in_(entity_ids))
            ).order_by(
                States.last_updated.desc())

            most_recent_state_ids = most_recent_state_ids.limit(1)

        else:
            # We have more than one entity to look at (most commonly we want
            # all entities,) so we need to do a search on all states since the
            # last recorder run started.

            most_recent_states_by_date = session.query(
                States.entity_id.label('max_entity_id'),
                func.max(States.last_updated).label('max_last_updated')
            ).filter(
                (States.last_updated >= run.start) &
                (States.last_updated < utc_point_in_time)
            )

            if entity_ids:
                # Query.filter() returns a new query; assign it back or the
                # entity filter is silently lost.
                most_recent_states_by_date = most_recent_states_by_date.filter(
                    States.entity_id.in_(entity_ids))

            most_recent_states_by_date = most_recent_states_by_date.group_by(
                States.entity_id)

            most_recent_states_by_date = most_recent_states_by_date.subquery()

            most_recent_state_ids = session.query(
                func.max(States.state_id).label('max_state_id')
            ).join(most_recent_states_by_date, and_(
                States.entity_id == most_recent_states_by_date.c.max_entity_id,
                States.last_updated == most_recent_states_by_date.c.
                max_last_updated))

            most_recent_state_ids = most_recent_state_ids.group_by(
                States.entity_id)

        most_recent_state_ids = most_recent_state_ids.subquery()

        query = session.query(States).join(
            most_recent_state_ids,
            States.state_id == most_recent_state_ids.c.max_state_id
        ).filter((~States.domain.in_(IGNORE_DOMAINS)))

        if filters:
            query = filters.apply(query, entity_ids)

        return [state for state in execute(query)
                if not state.attributes.get(ATTR_HIDDEN, False)]