예제 #1
0
def state_changes_during_period(opp,
                                start_time,
                                end_time=None,
                                entity_id=None):
    """Return states changes during UTC period start_time - end_time."""
    with session_scope(opp=opp) as session:
        # Assemble a baked (cached) query step by step; bind parameters are
        # supplied once at execution time below.
        baked_query = opp.data[HISTORY_BAKERY](
            lambda session: session.query(*QUERY_STATES))

        # Only rows that represent an actual state change after start_time.
        baked_query += lambda q: q.filter(
            (States.last_changed == States.last_updated)
            & (States.last_updated > bindparam("start_time")))

        if end_time is not None:
            baked_query += lambda q: q.filter(
                States.last_updated < bindparam("end_time"))

        if entity_id is not None:
            entity_id = entity_id.lower()
            baked_query += lambda q: q.filter_by(
                entity_id=bindparam("entity_id"))

        baked_query += lambda q: q.order_by(
            States.entity_id, States.last_updated)

        states = execute(
            baked_query(session).params(
                start_time=start_time,
                end_time=end_time,
                entity_id=entity_id))

        entity_ids = None if entity_id is None else [entity_id]

        return _sorted_states_to_dict(opp, session, states, start_time,
                                      entity_ids)
예제 #2
0
def get_last_state_changes(opp, number_of_states, entity_id):
    """Return the last number_of_states."""
    start_time = dt_util.utcnow()

    with session_scope(opp=opp) as session:
        # Baked (cached) query: newest state changes first, limited below.
        baked_query = opp.data[HISTORY_BAKERY](
            lambda session: session.query(*QUERY_STATES))

        # Only rows that represent an actual state change.
        baked_query += lambda q: q.filter(
            States.last_changed == States.last_updated)

        if entity_id is not None:
            entity_id = entity_id.lower()
            baked_query += lambda q: q.filter_by(
                entity_id=bindparam("entity_id"))

        baked_query += lambda q: q.order_by(
            States.entity_id, States.last_updated.desc())
        baked_query += lambda q: q.limit(bindparam("number_of_states"))

        states = execute(
            baked_query(session).params(
                number_of_states=number_of_states, entity_id=entity_id))

        entity_ids = None if entity_id is None else [entity_id]

        # The query is descending; reverse so callers get chronological order.
        return _sorted_states_to_dict(
            opp,
            session,
            reversed(states),
            start_time,
            entity_ids,
            include_start_time_state=False,
        )
예제 #3
0
    def _load_history_from_db(self):
        """Load the history of the brightness values from the database.

        This only needs to be done once during startup.
        """

        start_date = datetime.now() - timedelta(days=self._conf_check_days)
        entity_id = self._readingmap.get(READING_BRIGHTNESS)
        if entity_id is None:
            _LOGGER.debug("Not reading the history from the database as "
                          "there is no brightness sensor configured")
            return

        _LOGGER.debug("Initializing values for %s from the database",
                      self._name)
        with session_scope(opp=self.opp) as session:
            # BUGFIX: the original combined the two conditions with Python's
            # `and`, which evaluates the truthiness of the first SQLAlchemy
            # clause element and silently discards one of the conditions —
            # the date restriction never reached the SQL query. The SQL
            # conjunction operator `&` applies both filters.
            query = (session.query(States).filter(
                (States.entity_id == entity_id.lower())
                & (States.last_updated > start_date)).order_by(
                    States.last_updated.asc()))
            states = execute(query, to_native=True, validate_entity_ids=False)

            for state in states:
                # filter out all None, NaN and "unknown" states
                # only keep real values
                with suppress(ValueError):
                    self._brightness_history.add_measurement(
                        int(state.state), state.last_updated)
        _LOGGER.debug("Initializing from database completed")
예제 #4
0
def test_recorder_bad_execute(opp_recorder):
    """Bad execute, retry 3 times."""
    from sqlalchemy.exc import SQLAlchemyError

    opp_recorder()

    def to_native():
        """Raise exception."""
        raise SQLAlchemyError()

    # A fake result row whose conversion always fails.
    bad_row = MagicMock()
    bad_row.to_native = to_native

    with pytest.raises(SQLAlchemyError), patch(
            "openpeerpower.components.recorder.time.sleep") as sleep_mock:
        util.execute((bad_row, ))

    # Three attempts -> two sleeps between retries before giving up.
    assert sleep_mock.call_count == 2
예제 #5
0
def _get_single_entity_states_with_session(opp, session, utc_point_in_time,
                                           entity_id):
    """Return the latest state of one entity before utc_point_in_time."""
    # With exactly one entity id we can use an entirely different (and
    # extremely fast) query: newest matching row, limit 1.
    baked_query = opp.data[HISTORY_BAKERY](
        lambda session: session.query(*QUERY_STATES))
    baked_query += lambda q: q.filter(
        States.last_updated < bindparam("utc_point_in_time"),
        States.entity_id == bindparam("entity_id"),
    )
    baked_query += lambda q: q.order_by(States.last_updated.desc())
    baked_query += lambda q: q.limit(1)

    query = baked_query(session).params(
        utc_point_in_time=utc_point_in_time, entity_id=entity_id)

    return [LazyState(row) for row in execute(query)]
예제 #6
0
def get_significant_states(
    opp,
    start_time,
    end_time=None,
    entity_ids=None,
    filters=None,
    include_start_time_state=True,
):
    """
    Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    timer_start = time.perf_counter()

    with session_scope(opp=opp) as session:
        # Base condition: significant domains, or rows representing an
        # actual state change, all after start_time.
        query = session.query(States).filter(
            (States.domain.in_(SIGNIFICANT_DOMAINS)
             | (States.last_changed == States.last_updated))
            & (States.last_updated > start_time))

        if filters:
            query = filters.apply(query, entity_ids)

        if end_time is not None:
            query = query.filter(States.last_updated < end_time)

        query = query.order_by(States.last_updated)

        # Lazily drop insignificant and hidden states.
        states = (
            state
            for state in execute(query)
            if _is_significant(state)
            and not state.attributes.get(ATTR_HIDDEN, False)
        )

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug("get_significant_states took %fs", elapsed)

    return states_to_json(opp, states, start_time, entity_ids, filters,
                          include_start_time_state)
예제 #7
0
    async def _async_initialize_from_database(self):
        """Initialize the list of states from the database.

        The query will get the list of states in DESCENDING order so that we
        can limit the result to self._sample_size. Afterwards reverse the
        list so that we get it in the right order again.

        If MaxAge is provided then query will restrict to entries younger then
        current datetime - MaxAge.
        """

        _LOGGER.debug("%s: initializing values from the database", self.entity_id)

        with session_scope(opp=self.opp) as session:
            query = session.query(States).filter(
                States.entity_id == self._entity_id.lower()
            )

            if self._max_age is None:
                _LOGGER.debug("%s: retrieving all records", self.entity_id)
            else:
                # Restrict to entries younger than now - max_age.
                records_older_then = dt_util.utcnow() - self._max_age
                _LOGGER.debug(
                    "%s: retrieve records not older then %s",
                    self.entity_id,
                    records_older_then,
                )
                query = query.filter(States.last_updated >= records_older_then)

            # Newest first so the LIMIT keeps the most recent samples.
            query = query.order_by(States.last_updated.desc()).limit(
                self._sampling_size
            )
            states = execute(query, to_native=True, validate_entity_ids=False)

        # Re-establish chronological order before feeding the queue.
        for state in reversed(states):
            self._add_state_to_queue(state)

        self.async_schedule_update_op_state(True)

        _LOGGER.debug("%s: initializing from database completed", self.entity_id)
예제 #8
0
def state_changes_during_period(opp,
                                start_time,
                                end_time=None,
                                entity_id=None):
    """Return states changes during UTC period start_time - end_time."""

    with session_scope(opp=opp) as session:
        # Only rows that represent an actual state change after start_time.
        query = session.query(States).filter(
            (States.last_changed == States.last_updated)
            & (States.last_updated > start_time))

        if end_time is not None:
            query = query.filter(States.last_updated < end_time)

        if entity_id is not None:
            query = query.filter_by(entity_id=entity_id.lower())

        entity_ids = None if entity_id is None else [entity_id]

        states = execute(query.order_by(States.last_updated))

    return states_to_json(opp, states, start_time, entity_ids)
예제 #9
0
def get_last_state_changes(opp, number_of_states, entity_id):
    """Return the last number_of_states."""

    start_time = dt_util.utcnow()

    with session_scope(opp=opp) as session:
        # Only rows that represent an actual state change.
        query = session.query(States).filter(
            (States.last_changed == States.last_updated))

        if entity_id is not None:
            query = query.filter_by(entity_id=entity_id.lower())

        entity_ids = None if entity_id is None else [entity_id]

        # Newest first, limited to the requested count.
        query = query.order_by(States.last_updated.desc())
        states = execute(query.limit(number_of_states))

    # Reverse so callers receive chronological order.
    return states_to_json(opp,
                          reversed(states),
                          start_time,
                          entity_ids,
                          include_start_time_state=False)
예제 #10
0
def _get_significant_states(
    opp,
    session,
    start_time,
    end_time=None,
    entity_ids=None,
    filters=None,
    include_start_time_state=True,
    significant_changes_only=True,
    minimal_response=False,
):
    """
    Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    timer_start = time.perf_counter()

    # Assemble a baked (cached) query; bind parameters are supplied once at
    # execution time below.
    baked_query = opp.data[HISTORY_BAKERY](
        lambda session: session.query(*QUERY_STATES))

    if significant_changes_only:
        baked_query += lambda q: q.filter(
            (States.domain.in_(SIGNIFICANT_DOMAINS)
             | (States.last_changed == States.last_updated))
            & (States.last_updated > bindparam("start_time")))
    else:
        baked_query += lambda q: q.filter(
            States.last_updated > bindparam("start_time"))

    if entity_ids is None:
        baked_query += lambda q: q.filter(~States.domain.in_(IGNORE_DOMAINS))
        if filters:
            filters.bake(baked_query)
    else:
        baked_query += lambda q: q.filter(
            States.entity_id.in_(bindparam("entity_ids", expanding=True)))

    if end_time is not None:
        baked_query += lambda q: q.filter(
            States.last_updated < bindparam("end_time"))

    baked_query += lambda q: q.order_by(States.entity_id, States.last_updated)

    states = execute(
        baked_query(session).params(
            start_time=start_time,
            end_time=end_time,
            entity_ids=entity_ids))

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug("get_significant_states took %fs", elapsed)

    return _sorted_states_to_dict(
        opp,
        session,
        states,
        start_time,
        entity_ids,
        filters,
        include_start_time_state,
        minimal_response,
    )
예제 #11
0
def _get_states_with_session(opp,
                             session,
                             utc_point_in_time,
                             entity_ids=None,
                             run=None,
                             filters=None):
    """Return the states at a specific point in time."""
    if entity_ids and len(entity_ids) == 1:
        return _get_single_entity_states_with_session(opp, session,
                                                      utc_point_in_time,
                                                      entity_ids[0])

    if run is None:
        run = recorder.run_information_with_session(session, utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    # We have more than one entity to look at (most commonly we want
    # all entities,) so we need to do a search on all states since the
    # last recorder run started.
    query = session.query(*QUERY_STATES)

    # For every entity, find the timestamp of its most recent update
    # inside the run window.
    most_recent_states_by_date = session.query(
        States.entity_id.label("max_entity_id"),
        func.max(States.last_updated).label("max_last_updated"),
    ).filter((States.last_updated >= run.start)
             & (States.last_updated < utc_point_in_time))

    if entity_ids:
        # BUGFIX: Query.filter returns a NEW query; the original discarded
        # the return value, so the subquery scanned every entity instead of
        # only the requested ones.
        most_recent_states_by_date = most_recent_states_by_date.filter(
            States.entity_id.in_(entity_ids))

    most_recent_states_by_date = most_recent_states_by_date.group_by(
        States.entity_id)

    most_recent_states_by_date = most_recent_states_by_date.subquery()

    # Resolve ties on last_updated by taking the largest state_id.
    most_recent_state_ids = session.query(
        func.max(States.state_id).label("max_state_id")).join(
            most_recent_states_by_date,
            and_(
                States.entity_id == most_recent_states_by_date.c.max_entity_id,
                States.last_updated ==
                most_recent_states_by_date.c.max_last_updated,
            ),
        )

    most_recent_state_ids = most_recent_state_ids.group_by(States.entity_id)

    most_recent_state_ids = most_recent_state_ids.subquery()

    query = query.join(
        most_recent_state_ids,
        States.state_id == most_recent_state_ids.c.max_state_id,
    )

    if entity_ids is not None:
        query = query.filter(States.entity_id.in_(entity_ids))
    else:
        query = query.filter(~States.domain.in_(IGNORE_DOMAINS))
        if filters:
            query = filters.apply(query)

    return [LazyState(row) for row in execute(query)]
예제 #12
0
def get_states(opp,
               utc_point_in_time,
               entity_ids=None,
               run=None,
               filters=None):
    """Return the states at a specific point in time."""

    if run is None:
        run = recorder.run_information(opp, utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    with session_scope(opp=opp) as session:
        query = session.query(States)

        if entity_ids and len(entity_ids) == 1:
            # Use an entirely different (and extremely fast) query if we only
            # have a single entity id
            query = (query.filter(
                States.last_updated >= run.start,
                States.last_updated < utc_point_in_time,
                States.entity_id.in_(entity_ids),
            ).order_by(States.last_updated.desc()).limit(1))

        else:
            # We have more than one entity to look at (most commonly we want
            # all entities,) so we need to do a search on all states since the
            # last recorder run started.

            most_recent_states_by_date = session.query(
                States.entity_id.label("max_entity_id"),
                func.max(States.last_updated).label("max_last_updated"),
            ).filter((States.last_updated >= run.start)
                     & (States.last_updated < utc_point_in_time))

            if entity_ids:
                # BUGFIX: Query.filter returns a NEW query; the original
                # discarded the return value, so the entity restriction was
                # silently dropped and the subquery scanned all entities.
                most_recent_states_by_date = most_recent_states_by_date.filter(
                    States.entity_id.in_(entity_ids))

            most_recent_states_by_date = most_recent_states_by_date.group_by(
                States.entity_id)

            most_recent_states_by_date = most_recent_states_by_date.subquery()

            # Resolve ties on last_updated by taking the largest state_id.
            most_recent_state_ids = session.query(
                func.max(States.state_id).label("max_state_id")).join(
                    most_recent_states_by_date,
                    and_(
                        States.entity_id ==
                        most_recent_states_by_date.c.max_entity_id,
                        States.last_updated ==
                        most_recent_states_by_date.c.max_last_updated,
                    ),
                )

            most_recent_state_ids = most_recent_state_ids.group_by(
                States.entity_id)

            most_recent_state_ids = most_recent_state_ids.subquery()

            query = query.join(
                most_recent_state_ids,
                States.state_id == most_recent_state_ids.c.max_state_id,
            ).filter(~States.domain.in_(IGNORE_DOMAINS))

            if filters:
                query = filters.apply(query, entity_ids)

        return [
            state for state in execute(query)
            if not state.attributes.get(ATTR_HIDDEN, False)
        ]