Example #1
    def get(self, request, date=None):
        """Retrieve logbook entries."""
        if date:
            start_day = dt_util.start_of_local_day(date)
        else:
            start_day = dt_util.start_of_local_day()

        end_day = start_day + timedelta(days=1)

        events = recorder.query_events(
            QUERY_EVENTS_BETWEEN,
            (dt_util.as_utc(start_day), dt_util.as_utc(end_day)))

        return self.json(humanify(events))
Example #2
    async def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        period = request.query.get('period')
        if period is None:
            period = 1
        else:
            period = int(period)

        entity_id = request.query.get('entity')
        start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1)
        end_day = start_day + timedelta(days=period)
        hass = request.app['hass']

        def json_events():
            """Fetch events and generate JSON."""
            return self.json(list(
                _get_events(hass, self.config, start_day, end_day, entity_id)))

        return await hass.async_add_job(json_events)
Example #3
def convert_time_to_utc(timestr):
    """Take a string like 08:00:00 and convert it to a unix timestamp."""
    combined = datetime.combine(
        dt_util.start_of_local_day(), dt_util.parse_time(timestr))
    if combined < datetime.now():
        combined = combined + timedelta(days=1)
    return dt_util.as_timestamp(combined)
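
A usage sketch for the helper above. The values are illustrative; dt_util is homeassistant.util.dt, and times that have already passed today roll over to tomorrow:

# Assuming it is currently 14:00 local time:
convert_time_to_utc("08:00:00")  # Unix timestamp of 08:00 tomorrow
convert_time_to_utc("20:00:00")  # Unix timestamp of 20:00 today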
Example #4
    def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        start_day = dt_util.as_utc(datetime)
        end_day = start_day + timedelta(days=1)

        def get_results():
            """Query DB for results."""
            events = recorder.get_model('Events')
            query = recorder.query('Events').order_by(
                events.time_fired).filter(
                    (events.time_fired > start_day) &
                    (events.time_fired < end_day))
            events = recorder.execute(query)
            return _exclude_events(events, self.config)

        events = yield from request.app['hass'].loop.run_in_executor(
            None, get_results)

        return self.json(humanify(events))
Example #5
def _get_date(date):
    """Get the dateTime from date or dateTime as a local."""
    if 'date' in date:
        return dt.start_of_local_day(dt.dt.datetime.combine(
            dt.parse_date(date['date']), dt.dt.time.min))
    else:
        return dt.as_local(dt.parse_datetime(date['dateTime']))
Example #6
    def get(self, request, date=None):
        """Retrieve logbook entries."""
        if date:
            start_day = dt_util.start_of_local_day(date)
        else:
            start_day = dt_util.start_of_local_day()

        start_day = dt_util.as_utc(start_day)
        end_day = start_day + timedelta(days=1)

        events = recorder.get_model('Events')
        query = recorder.query('Events').filter(
            (events.time_fired > start_day) &
            (events.time_fired < end_day))
        events = recorder.execute(query)

        return self.json(humanify(events))
Example #7
    def get_next_interval(self, now=None):
        """Compute next time an update should occur."""
        interval = self._tts_scan_interval

        if now is None:
            now = dt_util.utcnow()
        if interval == 86460 or interval is None:
            now = dt_util.start_of_local_day(dt_util.as_local(now))
        return now + timedelta(seconds=interval)
Example #8
class LitterRobotControlEntity(LitterRobotEntity):
    """A Litter-Robot entity that can control the unit."""
    def __init__(self, robot: Robot, entity_type: str,
                 hub: LitterRobotHub) -> None:
        """Init a Litter-Robot control entity."""
        super().__init__(robot=robot, entity_type=entity_type, hub=hub)
        self._refresh_callback: CALLBACK_TYPE | None = None

    async def perform_action_and_refresh(self, action: MethodType, *args: Any,
                                         **kwargs: Any) -> bool:
        """Perform an action and initiates a refresh of the robot data after a few seconds."""
        success = False

        try:
            success = await action(*args, **kwargs)
        except InvalidCommandException as ex:  # pragma: no cover
            # this exception should only occur if the underlying API for commands changes
            _LOGGER.error(ex)
            success = False

        if success:
            self.async_cancel_refresh_callback()
            self._refresh_callback = async_call_later(
                self.hass, REFRESH_WAIT_TIME_SECONDS,
                self.async_call_later_callback)
        return success

    async def async_call_later_callback(self, *_) -> None:
        """Perform refresh request on callback."""
        self._refresh_callback = None
        await self.coordinator.async_request_refresh()

    async def async_will_remove_from_hass(self) -> None:
        """Cancel refresh callback when entity is being removed from hass."""
        self.async_cancel_refresh_callback()

    @callback
    def async_cancel_refresh_callback(self):
        """Clear the refresh callback if it has not already fired."""
        if self._refresh_callback is not None:
            self._refresh_callback()
            self._refresh_callback = None

    @staticmethod
    def parse_time_at_default_timezone(time_str: str | None) -> time | None:
        """Parse a time string and add default timezone."""
        if time_str is None:
            return None

        if (parsed_time := dt_util.parse_time(time_str)) is None:
            return None

        return (dt_util.start_of_local_day().replace(
            hour=parsed_time.hour,
            minute=parsed_time.minute,
            second=parsed_time.second,
        ).timetz())
Example #9
def get_date(date: dict[str, Any]) -> datetime.datetime:
    """Get the dateTime from date or dateTime as a local."""
    if "date" in date:
        parsed_date = dt.parse_date(date["date"])
        assert parsed_date
        return dt.start_of_local_day(
            datetime.datetime.combine(parsed_date, datetime.time.min))
    parsed_datetime = dt.parse_datetime(date["dateTime"])
    assert parsed_datetime
    return dt.as_local(parsed_datetime)
Example #10
async def schedule_future_update(hass, sensors, midnight_time,
                                 prayer_times_data):
    """Schedule future update for sensors.

    Midnight is a calculated time.  The specifics of the calculation
    depend on the method of the prayer time calculation.  This calculated
    midnight is the time at which the time to pray the Isha prayers has
    expired.

    Calculated Midnight: The Islamic midnight.
    Traditional Midnight: 12:00AM

    Update logic for prayer times:

    If the Calculated Midnight is before the traditional midnight then wait
    until the traditional midnight to run the update.  This way the day
    will have changed over and we don't need to do any fancy calculations.

    If the Calculated Midnight is after the traditional midnight, then wait
    until after the calculated Midnight.  We don't want to update the prayer
    times too early or else the timings might be incorrect.

    Example:
    calculated midnight = 11:23PM (before traditional midnight)
    Update time: 12:00AM

    calculated midnight = 1:35AM (after traditional midnight)
    update time: 1:36AM.
    """
    _LOGGER.debug("Scheduling next update for Islamic prayer times")

    now = dt_util.as_local(dt_util.now())
    today = now.date()

    midnight_dt_str = '{}::{}'.format(str(today), midnight_time)
    midnight_dt = datetime.strptime(midnight_dt_str, '%Y-%m-%d::%H:%M')

    if now > dt_util.as_local(midnight_dt):
        _LOGGER.debug("Midnight is after day the changes so schedule update "
                      "for after Midnight the next day")

        next_update_at = midnight_dt + timedelta(days=1, minutes=1)
    else:
        _LOGGER.debug(
            "Midnight is before the day changes so schedule update for the "
            "next start of day")

        tomorrow = now + timedelta(days=1)
        next_update_at = dt_util.start_of_local_day(tomorrow)

    _LOGGER.debug("Next update scheduled for: %s", str(next_update_at))

    async_track_point_in_time(hass,
                              update_sensors(hass, sensors, prayer_times_data),
                              next_update_at)
Example #11
    async def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message("Invalid datetime", HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        period = request.query.get("period")
        if period is None:
            period = 1
        else:
            period = int(period)

        entity_ids = request.query.get("entity")
        if entity_ids:
            try:
                entity_ids = cv.entity_ids(entity_ids)
            except vol.Invalid:
                raise InvalidEntityFormatError(
                    f"Invalid entity id(s) encountered: {entity_ids}. "
                    "Format should be <domain>.<object_id>"
                ) from vol.Invalid

        end_time = request.query.get("end_time")
        if end_time is None:
            start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1)
            end_day = start_day + timedelta(days=period)
        else:
            start_day = datetime
            end_day = dt_util.parse_datetime(end_time)
            if end_day is None:
                return self.json_message("Invalid end_time", HTTP_BAD_REQUEST)

        hass = request.app["hass"]

        entity_matches_only = "entity_matches_only" in request.query

        def json_events():
            """Fetch events and generate JSON."""
            return self.json(
                _get_events(
                    hass,
                    start_day,
                    end_day,
                    entity_ids,
                    self.filters,
                    self.entities_filter,
                    entity_matches_only,
                )
            )

        return await hass.async_add_executor_job(json_events)
Example #12
def _handle_get_logbook(handler, path_match, data):
    """ Return logbook entries. """
    date_str = path_match.group("date")

    if date_str:
        start_date = dt_util.date_str_to_date(date_str)

        if start_date is None:
            handler.write_json_message("Error parsing JSON", HTTP_BAD_REQUEST)
            return

        start_day = dt_util.start_of_local_day(start_date)
    else:
        start_day = dt_util.start_of_local_day()

    end_day = start_day + timedelta(days=1)

    events = recorder.query_events(QUERY_EVENTS_BETWEEN, (dt_util.as_utc(start_day), dt_util.as_utc(end_day)))

    handler.write_json(humanify(events))
Example #13
    def update(self):
        """Get the latest data."""
        # We have to retrieve the results for the whole day as the server
        # won't return events that have already started
        results = self.calendar.date_search(
            dt.start_of_local_day(),
            dt.start_of_local_day() + timedelta(days=1)
        )

        # dtstart can be a date or datetime depending if the event lasts a
        # whole day. Convert everything to datetime to be able to sort it
        results.sort(key=lambda x: self.to_datetime(
            x.instance.vevent.dtstart.value
        ))

        vevent = next((
            event.instance.vevent for event in results
            if (self.is_matching(event.instance.vevent, self.search)
                and (not self.is_all_day(event.instance.vevent)
                     or self.include_all_day)
                and not self.is_over(event.instance.vevent))), None)

        # If no matching event could be found
        if vevent is None:
            _LOGGER.debug(
                "No matching event found in the %d results for %s",
                len(results),
                self.calendar.name,
            )
            self.event = None
            return True

        # Populate the entity attributes with the event values
        self.event = {
            "summary": vevent.summary.value,
            "start": self.get_hass_date(vevent.dtstart.value),
            "end": self.get_hass_date(vevent.dtend.value),
            "location": self.get_attr_value(vevent, "location"),
            "description": self.get_attr_value(vevent, "description")
        }
        return True
Example #14
    def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        start_day = dt_util.as_utc(datetime or dt_util.start_of_local_day())
        end_day = start_day + timedelta(days=1)

        events = recorder.get_model('Events')
        query = recorder.query('Events').filter(
            (events.time_fired > start_day) &
            (events.time_fired < end_day))
        events = recorder.execute(query)

        return self.json(humanify(events))
Example #15
    def parse_time_at_default_timezone(time_str: str) -> time | None:
        """Parse a time string and add default timezone."""
        parsed_time = dt_util.parse_time(time_str)

        if parsed_time is None:
            return None

        return (dt_util.start_of_local_day().replace(
            hour=parsed_time.hour,
            minute=parsed_time.minute,
            second=parsed_time.second,
        ).timetz())
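
A standalone sketch of the same pattern (assuming homeassistant.util.dt is importable as dt_util; the printed offset depends on the configured local timezone):

from homeassistant.util import dt as dt_util

parsed = dt_util.parse_time("07:30:00")
if parsed is not None:
    # Attach the default timezone by placing the time on today's local midnight
    local_time = dt_util.start_of_local_day().replace(
        hour=parsed.hour, minute=parsed.minute, second=parsed.second
    ).timetz()
    print(local_time)  # e.g. 07:30:00+01:00 when the local timezone is UTC+1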
Example #16
    async def async_schedule_future_update(self):
        """Schedule future update for sensors.

        Midnight is a calculated time.  The specifics of the calculation
        depend on the method of the prayer time calculation.  This calculated
        midnight is the time at which the time to pray the Isha prayers has
        expired.

        Calculated Midnight: The Islamic midnight.
        Traditional Midnight: 12:00AM

        Update logic for prayer times:

        If the Calculated Midnight is before the traditional midnight then wait
        until the traditional midnight to run the update.  This way the day
        will have changed over and we don't need to do any fancy calculations.

        If the Calculated Midnight is after the traditional midnight, then wait
        until after the calculated Midnight.  We don't want to update the prayer
        times too early or else the timings might be incorrect.

        Example:
        calculated midnight = 11:23PM (before traditional midnight)
        Update time: 12:00AM

        calculated midnight = 1:35AM (after traditional midnight)
        update time: 1:36AM.

        """
        _LOGGER.debug("Scheduling next update for Islamic prayer times")

        now = dt_util.as_local(dt_util.now())

        midnight_dt = self.prayer_times_info["Midnight"]

        if now > dt_util.as_local(midnight_dt):
            next_update_at = midnight_dt + timedelta(days=1, minutes=1)
            _LOGGER.debug(
                "Midnight is after day the changes so schedule update for after Midnight the next day"
            )
        else:
            _LOGGER.debug(
                "Midnight is before the day changes so schedule update for the next start of day"
            )
            next_update_at = dt_util.start_of_local_day(now +
                                                        timedelta(days=1))

        _LOGGER.info("Next update scheduled for: %s", next_update_at)

        self.event_unsub = async_track_point_in_time(self.hass,
                                                     self.async_update,
                                                     next_update_at)
Example #17
    async def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message("Invalid datetime",
                                         HTTPStatus.BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        if (period := request.query.get("period")) is None:
            period = 1
Example #18
    def update(self):
        """Get the latest data."""
        # We have to retrieve the results for the whole day as the server
        # won't return events that have already started
        results = self.calendar.date_search(
            dt.start_of_local_day(),
            dt.start_of_local_day() + timedelta(days=1)
        )

        # dtstart can be a date or datetime depending if the event lasts a
        # whole day. Convert everything to datetime to be able to sort it
        results.sort(key=lambda x: self.to_datetime(
            x.instance.vevent.dtstart.value
        ))

        vevent = next((
            event.instance.vevent for event in results
            if (self.is_matching(event.instance.vevent, self.search)
                and (not self.is_all_day(event.instance.vevent)
                     or self.include_all_day)
                and not self.is_over(event.instance.vevent))), None)

        # If no matching event could be found
        if vevent is None:
            _LOGGER.debug(
                "No matching event found in the %d results for %s",
                len(results), self.calendar.name)
            self.event = None
            return True

        # Populate the entity attributes with the event values
        self.event = {
            "summary": vevent.summary.value,
            "start": self.get_hass_date(vevent.dtstart.value),
            "end": self.get_hass_date(self.get_end_date(vevent)),
            "location": self.get_attr_value(vevent, "location"),
            "description": self.get_attr_value(vevent, "description")
        }
        return True
Example #19
def _handle_get_logbook(handler, path_match, data):
    """ Return logbook entries. """
    date_str = path_match.group('date')

    if date_str:
        start_date = dt_util.date_str_to_date(date_str)

        if start_date is None:
            handler.write_json_message("Error parsing JSON", HTTP_BAD_REQUEST)
            return

        start_day = dt_util.start_of_local_day(start_date)
    else:
        start_day = dt_util.start_of_local_day()

    end_day = start_day + timedelta(days=1)

    events = recorder.query_events(
        QUERY_EVENTS_BETWEEN,
        (dt_util.as_utc(start_day), dt_util.as_utc(end_day)))

    handler.write_json(humanify(events))
Example #20
    def get_next_interval(self, now=None):
        """Compute next time an update should occur."""
        if now is None:
            now = dt_util.utcnow()
        if self.type == 'date':
            now = dt_util.start_of_local_day(dt_util.as_local(now))
            return now + timedelta(seconds=86400)
        if self.type == 'beat':
            interval = 86.4
        else:
            interval = 60
        timestamp = int(dt_util.as_timestamp(now))
        delta = interval - (timestamp % interval)
        return now + timedelta(seconds=delta)
Example #21
    def update(self):
        """Update state of sensor."""
        self._last_reset = dt_util.start_of_local_day()
        try:
            with suppress(PyViCareNotSupportedFeatureError):
                self._state = self.entity_description.value_getter(self._api)
        except requests.exceptions.ConnectionError:
            _LOGGER.error("Unable to retrieve data from ViCare server")
        except ValueError:
            _LOGGER.error("Unable to decode data from ViCare server")
        except PyViCareRateLimitError as limit_exception:
            _LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception)
        except PyViCareInvalidDataError as invalid_data_exception:
            _LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception)
Example #22
    def get(self, request, date=None):
        """Return history over a period of time."""
        one_day = timedelta(days=1)

        if date:
            start_time = dt_util.as_utc(dt_util.start_of_local_day(date))
        else:
            start_time = dt_util.utcnow() - one_day

        end_time = start_time + one_day
        entity_id = request.args.get('filter_entity_id')

        return self.json(
            get_significant_states(start_time, end_time, entity_id).values())
Example #23
    def get(self, request, date=None):
        """Return history over a period of time."""
        one_day = timedelta(days=1)

        if date:
            start_time = dt_util.as_utc(dt_util.start_of_local_day(date))
        else:
            start_time = dt_util.utcnow() - one_day

        end_time = start_time + one_day
        entity_id = request.args.get('filter_entity_id')

        return self.json(
            get_significant_states(start_time, end_time, entity_id).values())
Example #24
def calculate_datetime(entry: dict, sun_data):
    """Get datetime object with closest occurance based on time + weekdays input"""

    if "time" in entry:
        time = dt_util.parse_time(entry["time"])

        today = dt_util.start_of_local_day()
        nexttime = dt_util.as_utc(datetime.datetime.combine(today, time))

    elif "event" in entry:
        if not sun_data:
            _LOGGER.error("no sun data available")
            return

        offset_sign = entry["offset"][0]
        offset_string = entry["offset"][1:]

        time_offset = datetime.datetime.strptime(offset_string, "%H:%M")
        time_offset = datetime.timedelta(hours=time_offset.hour,
                                         minutes=time_offset.minute)

        time_sun = (sun_data["sunrise"]
                    if entry["event"] == "sunrise" else sun_data["sunset"])
        time_sun = datetime.datetime.strptime(
            time_sun[:len(time_sun) - 3] + time_sun[len(time_sun) - 2:],
            "%Y-%m-%dT%H:%M:%S%z",
        )

        if offset_sign == "+":
            nexttime = time_sun + time_offset
        else:
            nexttime = time_sun - time_offset

    now = dt_util.now().replace(microsecond=0)

    # check if time has already passed for today
    delta = nexttime - now
    while delta.total_seconds() <= 0:
        nexttime = nexttime + datetime.timedelta(days=1)
        delta = nexttime - now

    # check if timer is restricted in days of the week
    day_list = entry["days"]
    if len(day_list) > 0 and not 0 in day_list:
        weekday = dt_util.as_local(nexttime).isoweekday()
        while weekday not in day_list:
            nexttime = nexttime + datetime.timedelta(days=1)
            weekday = dt_util.as_local(nexttime).isoweekday()

    return nexttime
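
The entry dict consumed above is not defined in this snippet; judging from the keys the code reads, it might look roughly like one of the following:

# Fixed-time entry, limited to Mondays and Fridays (ISO weekdays):
entry_time = {"time": "07:30", "days": [1, 5]}

# Sun-relative entry, 30 minutes after sunset, on any day (0 means "every day"):
entry_sun = {"event": "sunset", "offset": "+00:30", "days": [0]}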
Example #25
    def update(self):
        """Get the latest data."""
        service = self.calendar_service.get()
        params = dict(DEFAULT_GOOGLE_SEARCH_PARAMS)
        params['timeMin'] = dt.start_of_local_day().isoformat('T')
        params['calendarId'] = self.calendar_id
        if self.search:
            params['q'] = self.search

        events = service.events()  # pylint: disable=no-member
        result = events.list(**params).execute()

        items = result.get('items', [])
        self.event = items[0] if len(items) == 1 else None
        return True
Example #26
def get_tide_xml_url(
    lat=58.974339,
    lon=5.730121,
    time_from=None,
    time_to=None,
    datatype="tab",
    interval=10,
) -> str:

    if time_from is None:
        time_from = dt_utils.start_of_local_day()
    time_from_str = time_from.strftime("%Y-%m-%dT%H:%M:%S.%f%z")

    if time_to is None:
        time_to = dt_utils.start_of_local_day() + timedelta(
            days=1, hours=23, minutes=59, seconds=59)
    time_to_str = time_to.strftime("%Y-%m-%dT%H:%M:%S.%f%z")

    url = (
        f"http://api.sehavniva.no/tideapi.php?lat={lat}&lon={lon}&fromtime={time_from_str}"
        f"&totime={time_to_str}&datatype={datatype}&refcode=cd&place=&file=&lang=en&"
        f"interval={interval}&dst=0&tzone=1&tide_request=locationdata"
    )

    return url
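
A usage sketch for the function above (coordinates and the URL shape are illustrative; fromtime and totime default to the local start of today and the end of tomorrow):

url = get_tide_xml_url(lat=58.97, lon=5.73)
# "http://api.sehavniva.no/tideapi.php?lat=58.97&lon=5.73&fromtime=<today 00:00>"
# "&totime=<tomorrow 23:59:59>&datatype=tab&refcode=cd&place=&file=&lang=en"
# "&interval=10&dst=0&tzone=1&tide_request=locationdata"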
Example #27
    def update(self):
        """Get the latest data."""
        service = self.calendar_service.get()
        params = dict(DEFAULT_GOOGLE_SEARCH_PARAMS)
        params['timeMin'] = dt.start_of_local_day().isoformat('T')
        params['calendarId'] = self.calendar_id
        if self.search:
            params['q'] = self.search

        events = service.events()  # pylint: disable=no-member
        result = events.list(**params).execute()

        items = result.get('items', [])
        self.event = items[0] if len(items) == 1 else None
        return True
Example #28
    async def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message("Invalid datetime", HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        period = request.query.get("period")
        if period is None:
            period = 1
        else:
            period = int(period)

        entity_id = request.query.get("entity")

        end_time = request.query.get("end_time")
        if end_time is None:
            start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1)
            end_day = start_day + timedelta(days=period)
        else:
            start_day = datetime
            end_day = dt_util.parse_datetime(end_time)
            if end_day is None:
                return self.json_message("Invalid end_time", HTTP_BAD_REQUEST)

        hass = request.app["hass"]

        entity_matches_only = "entity_matches_only" in request.query

        def json_events():
            """Fetch events and generate JSON."""
            return self.json(
                _get_events(
                    hass,
                    start_day,
                    end_day,
                    entity_id,
                    self.filters,
                    self.entities_filter,
                    entity_matches_only,
                ))

        return await hass.async_add_executor_job(json_events)
Example #29
    def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        start_day = dt_util.as_utc(datetime)
        end_day = start_day + timedelta(days=1)
        hass = request.app['hass']

        events = yield from hass.async_add_job(
            _get_events, hass, self.config, start_day, end_day)
        return self.json(events)
Example #30
    def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        start_day = dt_util.as_utc(datetime)
        end_day = start_day + timedelta(days=1)
        hass = request.app['hass']

        events = yield from hass.async_add_job(_get_events, hass, self.config,
                                               start_day, end_day)
        return self.json(events)
Example #31
    def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        start_day = dt_util.as_utc(datetime)
        end_day = start_day + timedelta(days=1)
        hass = request.app['hass']

        events = yield from hass.loop.run_in_executor(
            None, _get_events, hass, start_day, end_day)
        events = _exclude_events(events, self.config)
        return self.json(humanify(events))
Example #32
    def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        start_day = dt_util.as_utc(datetime)
        end_day = start_day + timedelta(days=1)
        hass = request.app['hass']

        events = yield from hass.loop.run_in_executor(None, _get_events, hass,
                                                      start_day, end_day)
        events = _exclude_events(events, self.config)
        return self.json(humanify(events))
Example #33
    def __init__(self, irrigation_id, attributes, entity_ids, component):
        """Initialize an Irrigation program."""
        self.entity_id = irrigation_id
        self._attributes = attributes
        self._component = component
        self._name = attributes.get(ATTR_NAME)
        self._zones = attributes.get(ATTR_ZONES)
        self._entities = entity_ids
        self._stop = False
        # Default to today for new programs
        now = dt_util.utcnow()
        time_date = dt_util.start_of_local_day(dt_util.as_local(now))
        self._last_run = dt_util.as_local(time_date).date().isoformat()
        self._template = attributes.get(ATTR_TEMPLATE)
        self._running = False
        self._running_zone = None
        self._state_attributes = {'days_since': self._last_run}
        self._eval_zones = True
        self._run_program = None
Example #34
    async def update_data(self, *_):
        from datetime import date, timedelta

        today = dt_util.start_of_local_day().date()
        next_flagday = date(today.year, self._date_of_flag.month, self._date_of_flag.day)

        if next_flagday < today:
            next_flagday = next_flagday.replace(year=today.year + 1)

        days_until_next_flagday = (next_flagday-today).days

        self._age_at_next_flagday = next_flagday.year - self._date_of_flag.year
        self._state = days_until_next_flagday

        if days_until_next_flagday == 0:
            # Fire event if flagday is today
            self.hass.bus.async_fire(event_type='flagday', event_data={'name': self._name, 'age': self._age_at_next_flagday})

        await self.async_update_ha_state()
        async_call_later(self.hass, self._get_seconds_until_midnight(), self.update_data)
Example #35
    def get_daily_usage(self):
        """Return current daily power usage."""
        kwh = 0
        start_time = dt_util.start_of_local_day().astimezone(dt_util.UTC).isoformat()
        end_time = dt_util.utcnow().isoformat()

        _LOGGER.debug("Start: %s, End: %s", start_time, end_time)

        try:
            history = self.neurio_client.get_samples_stats(
                self.sensor_id, start_time, "days", end_time
            )
        except (requests.exceptions.RequestException, ValueError, KeyError):
            _LOGGER.warning("Could not update daily power usage")
            return None

        for result in history:
            kwh += result["consumptionEnergy"] / 3600000

        self._daily_usage = round(kwh, 2)
Example #36
    async def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        start_day = dt_util.as_utc(datetime)
        end_day = start_day + timedelta(days=1)
        hass = request.app['hass']

        def json_events():
            """Fetch events and generate JSON."""
            return self.json(list(
                _get_events(hass, self.config, start_day, end_day)))

        return await hass.async_add_job(json_events)
Example #37
    def get_daily_usage(self):
        """Return current daily power usage."""
        kwh = 0
        start_time = dt_util.start_of_local_day() \
            .astimezone(dt_util.UTC).isoformat()
        end_time = dt_util.utcnow().isoformat()

        _LOGGER.debug('Start: %s, End: %s', start_time, end_time)

        try:
            history = self.neurio_client.get_samples_stats(
                self.sensor_id, start_time, 'days', end_time)
        except (requests.exceptions.RequestException, ValueError, KeyError):
            _LOGGER.warning("Could not update daily power usage")
            return None

        for result in history:
            kwh += result['consumptionEnergy'] / 3600000

        self._daily_usage = round(kwh, 2)
Example #38
    async def async_tcs_svc_request(self, service: dict, data: dict) -> None:
        """Process a service request (system mode) for a controller.

        Data validation is not required, it will have been done upstream.
        """
        if service == SVC_SET_SYSTEM_MODE:
            mode = data[ATTR_SYSTEM_MODE]
        else:  # otherwise it is SVC_RESET_SYSTEM
            mode = EVO_RESET

        if ATTR_DURATION_DAYS in data:
            until = dt_util.start_of_local_day()
            until += data[ATTR_DURATION_DAYS]

        elif ATTR_DURATION_HOURS in data:
            until = dt_util.now() + data[ATTR_DURATION_HOURS]

        else:
            until = None

        await self._set_tcs_mode(mode, until=until)
Example #39
    def get_next_interval(self):
        """Compute next time an update should occur."""
        now = dt_util.utcnow()

        if self.type == "date":
            tomorrow = dt_util.as_local(now) + timedelta(days=1)
            return dt_util.start_of_local_day(tomorrow)

        if self.type == "beat":
            # Add 1 hour because @0 beats is at 23:00:00 UTC.
            timestamp = dt_util.as_timestamp(now + timedelta(hours=1))
            interval = 86.4
        else:
            timestamp = dt_util.as_timestamp(now)
            interval = 60

        delta = interval - (timestamp % interval)
        next_interval = now + timedelta(seconds=delta)
        _LOGGER.debug("%s + %s -> %s (%s)", now, delta, next_interval, self.type)

        return next_interval
Example #40
    async def async_update(self) -> None:
        """Update entity."""
        key = self.entity_description.key

        if key == "diskspace":
            await self.sonarr.update()
        elif key == "commands":
            self.data[key] = await self.sonarr.commands()
        elif key == "queue":
            self.data[key] = await self.sonarr.queue()
        elif key == "series":
            self.data[key] = await self.sonarr.series()
        elif key == "upcoming":
            local = dt_util.start_of_local_day().replace(microsecond=0)
            start = dt_util.as_utc(local)
            end = start + timedelta(days=self.upcoming_days)

            self.data[key] = await self.sonarr.calendar(
                start=start.isoformat(), end=end.isoformat()
            )
        elif key == "wanted":
            self.data[key] = await self.sonarr.wanted(page_size=self.wanted_max_items)
Example #41
def _api_history_period(handler, path_match, data):
    """ Return history over a period of time. """
    date_str = path_match.group('date')
    one_day = timedelta(seconds=86400)

    if date_str:
        start_date = dt_util.date_str_to_date(date_str)

        if start_date is None:
            handler.write_json_message("Error parsing JSON", HTTP_BAD_REQUEST)
            return

        start_time = dt_util.as_utc(dt_util.start_of_local_day(start_date))
    else:
        start_time = dt_util.utcnow() - one_day

    end_time = start_time + one_day

    entity_id = data.get('filter_entity_id')

    handler.write_json(
        get_significant_states(start_time, end_time, entity_id).values())
Example #42
def _api_history_period(handler, path_match, data):
    """ Return history over a period of time. """
    date_str = path_match.group('date')
    one_day = timedelta(seconds=86400)

    if date_str:
        start_date = dt_util.date_str_to_date(date_str)

        if start_date is None:
            handler.write_json_message("Error parsing JSON", HTTP_BAD_REQUEST)
            return

        start_time = dt_util.as_utc(dt_util.start_of_local_day(start_date))
    else:
        start_time = dt_util.utcnow() - one_day

    end_time = start_time + one_day

    entity_id = data.get('filter_entity_id')

    handler.write_json(
        get_significant_states(start_time, end_time, entity_id).values())
Example #43
def extract_attrs(data) -> dict:
    """
    Peak = 08:00 to 20:00
    Off peak 1 = 00:00 to 08:00
    off peak 2 = 20:00 to 00:00
    """
    items = []
    d = defaultdict(list)

    peak_start = start_of(dt_utils.now().replace(hour=8), "hour")
    peak_end = end_of(dt_utils.now().replace(hour=19), "hour")
    offpeek1_start = dt_utils.start_of_local_day()
    offpeek1_end = end_of(offpeek1_start.replace(hour=7), "hour")
    offpeek2_start = start_of(dt_utils.now().replace(hour=20), "hour")
    offpeek2_end = end_of(dt_utils.now().replace(hour=23), "hour")

    for item in data:
        curr = dt_utils.as_local(item.get("start"))

        if time_in_range(peak_start, peak_end, curr):
            d["peak"].append(item.get("value"))

        elif time_in_range(offpeek1_start, offpeek1_end, curr):
            d["offpeek1"].append(item.get("value"))

        elif time_in_range(offpeek2_start, offpeek2_end, curr):
            d["offpeek2"].append(item.get("value"))

        items.append(item.get("value"))

    d["Peak"] = mean(d["peak"])
    d["Off-peak 1"] = mean(d["offpeek1"])
    d["Off-peak 2"] = mean(d["offpeek2"])
    d["Average"] = mean(items)
    d["Min"] = min(items)
    d["Max"] = max(items)

    return dict(d)
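
The example above relies on start_of, end_of, and time_in_range helpers that are not part of homeassistant.util.dt and are not shown here; a minimal sketch of what they might look like (names and behavior are assumptions inferred from the call sites):

from datetime import datetime


def start_of(value: datetime, unit: str) -> datetime:
    """Truncate a datetime to the start of the given unit (only "hour" is needed here)."""
    if unit == "hour":
        return value.replace(minute=0, second=0, microsecond=0)
    raise ValueError(f"Unsupported unit: {unit}")


def end_of(value: datetime, unit: str) -> datetime:
    """Return the last moment of the given unit (only "hour" is needed here)."""
    if unit == "hour":
        return value.replace(minute=59, second=59, microsecond=999999)
    raise ValueError(f"Unsupported unit: {unit}")


def time_in_range(start: datetime, end: datetime, value: datetime) -> bool:
    """Return True if value falls within the inclusive range [start, end]."""
    return start <= value <= end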
Example #44
    async def fetching_data(self, *_):
        import json

        def retry(err: str):
            minutes = 60
            _LOGGER.error("Retrying in %i minutes: %s", minutes, err)
            async_call_later(self.hass, minutes * 60, self.fetching_data)

        def get_seconds_until_midnight():
            one_day_in_seconds = 24 * 60 * 60

            now = dt_util.now()
            total_seconds_passed_today = (now.hour * 60 *
                                          60) + (now.minute * 60) + now.second

            return one_day_in_seconds - total_seconds_passed_today

        try:
            with open(self._theme_path, 'r') as dataFile:
                data = json.load(dataFile)
        except json.JSONDecodeError as err:
            retry(err)
            return

        specialThemes = data['themeDays']
        date = dt_util.start_of_local_day()
        dateStr = date.strftime('%Y%m%d')

        if dateStr in specialThemes:
            events = map(lambda x: x['event'], specialThemes[dateStr])
            self._state = ",".join(events)

            tasks = [self.async_update_ha_state()]
            await asyncio.wait(tasks, loop=self.hass.loop)

        async_call_later(self.hass, get_seconds_until_midnight(),
                         self.fetching_data)
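
The theme file read above is not shown; based on the keys the code accesses, its JSON content is assumed to look roughly like this (rendered here as the equivalent Python literal):

data = {
    "themeDays": {
        # Keys are dates formatted as %Y%m%d; each entry lists that day's events.
        "20240214": [
            {"event": "Valentine's Day"},
        ],
    }
}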
Example #45
async def schedule_future_update(hass, sensors, midnight_time,
                                 prayer_times_data):
    """Schedule future update for sensors.

    Midnight is a calculated time.  The specifics of the calculation
    depend on the method of the prayer time calculation.  This calculated
    midnight is the time at which the time to pray the Isha prayers has
    expired.

    Calculated Midnight: The Islamic midnight.
    Traditional Midnight: 12:00AM

    Update logic for prayer times:

    If the Calculated Midnight is before the traditional midnight then wait
    until the traditional midnight to run the update.  This way the day
    will have changed over and we don't need to do any fancy calculations.

    If the Calculated Midnight is after the traditional midnight, then wait
    until after the calculated Midnight.  We don't want to update the prayer
    times too early or else the timings might be incorrect.

    Example:
    calculated midnight = 11:23PM (before traditional midnight)
    Update time: 12:00AM

    calculated midnight = 1:35AM (after traditional midnight)
    update time: 1:36AM.
    """
    _LOGGER.debug("Scheduling next update for Islamic prayer times")

    now = dt_util.as_local(dt_util.now())
    today = now.date()

    midnight_dt_str = '{}::{}'.format(str(today), midnight_time)
    midnight_dt = datetime.strptime(midnight_dt_str, '%Y-%m-%d::%H:%M')

    if now > dt_util.as_local(midnight_dt):
        _LOGGER.debug("Midnight is after day the changes so schedule update "
                      "for after Midnight the next day")

        next_update_at = midnight_dt + timedelta(days=1, minutes=1)
    else:
        _LOGGER.debug(
            "Midnight is before the day changes so schedule update for the "
            "next start of day")

        tomorrow = now + timedelta(days=1)
        next_update_at = dt_util.start_of_local_day(tomorrow)

    _LOGGER.debug("Next update scheduled for: %s", str(next_update_at))

    async def update_sensors(_):
        """Update sensors with new prayer times."""
        # Update prayer times
        prayer_times = prayer_times_data.get_new_prayer_times()

        _LOGGER.debug("New prayer times retrieved.  Updating sensors.")

        # Update all prayer times sensors
        for sensor in sensors:
            sensor.async_schedule_update_ha_state(True)

        # Schedule next update
        await schedule_future_update(hass, sensors, prayer_times['Midnight'],
                                     prayer_times_data)

    async_track_point_in_time(hass,
                              update_sensors,
                              next_update_at)