示例#1
0
    async def get(self, request, datetime=None):
        """Return history over a period of time.

        datetime: optional ISO 8601 string from the URL path marking the
        start of the period; defaults to the last 24 hours when omitted.
        """
        # Used to report how long extraction took when debug logging is on.
        timer_start = time.perf_counter()
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            # parse_datetime returns None on malformed input (no exception).
            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)

        now = dt_util.utcnow()

        one_day = timedelta(days=1)
        if datetime:
            start_time = dt_util.as_utc(datetime)
        else:
            start_time = now - one_day

        # A start in the future can have no recorded history yet.
        if start_time > now:
            return self.json([])

        end_time = request.query.get('end_time')
        if end_time:
            end_time = dt_util.parse_datetime(end_time)
            if end_time:
                end_time = dt_util.as_utc(end_time)
            else:
                return self.json_message('Invalid end_time', HTTP_BAD_REQUEST)
        else:
            # Default window is exactly one day from the start time.
            end_time = start_time + one_day
        entity_ids = request.query.get('filter_entity_id')
        if entity_ids:
            entity_ids = entity_ids.lower().split(',')
        include_start_time_state = 'skip_initial_state' not in request.query

        hass = request.app['hass']

        # The database query is blocking work, so run it off the event loop.
        result = await hass.async_add_job(
            get_significant_states, hass, start_time, end_time,
            entity_ids, self.filters, include_start_time_state)
        result = list(result.values())
        if _LOGGER.isEnabledFor(logging.DEBUG):
            elapsed = time.perf_counter() - timer_start
            _LOGGER.debug(
                'Extracted %d states in %fs', sum(map(len, result)), elapsed)

        # Optionally reorder the result to respect the ordering given
        # by any entities explicitly included in the configuration.

        if self.use_include_order:
            sorted_result = []
            for order_entity in self.filters.included_entities:
                for state_list in result:
                    # Each state_list is one entity's history; its first
                    # state identifies which entity that is.
                    if state_list[0].entity_id == order_entity:
                        sorted_result.append(state_list)
                        # Safe despite mutating during iteration: the inner
                        # loop breaks immediately after the removal.
                        result.remove(state_list)
                        break
            sorted_result.extend(result)
            result = sorted_result

        # JSON serialization of a large result is also blocking work.
        return await hass.async_add_job(self.json, result)
示例#2
0
    async def _update(self, *_):
        """Refresh forecast state from the latest Met.no data."""
        import metno

        # Nothing to do until the initial weather payload has arrived.
        if self._weather_data is None:
            return

        now = dt_util.utcnow()

        candidates = []
        for entry in self._weather_data.data['product']['time']:
            starts_at = dt_util.parse_datetime(entry['@from'])
            ends_at = dt_util.parse_datetime(entry['@to'])

            # Skip intervals that have fully elapsed.
            if now >= ends_at:
                continue

            # Rank each remaining interval by how close it is to "now".
            distance = (abs((ends_at - now).total_seconds()) +
                        abs((starts_at - now).total_seconds()))
            candidates.append((distance, entry))

        if not candidates:
            return

        candidates.sort(key=lambda pair: pair[0])

        self._temperature = metno.get_forecast('temperature', candidates)
        self._condition = CONDITIONS.get(
            metno.get_forecast('symbol', candidates))
        self._pressure = metno.get_forecast('pressure', candidates)
        self._humidity = metno.get_forecast('humidity', candidates)
        self._wind_speed = metno.get_forecast('windSpeed', candidates)
        self._wind_bearing = metno.get_forecast('windDirection', candidates)
        self.async_schedule_update_ha_state()
示例#3
0
    def from_dict(cls, json_dict: Dict) -> Any:
        """Initialize a state from a dict.

        Async friendly.

        Ensures: state == State.from_json_dict(state.to_json_dict())
        """
        # A usable payload must at minimum carry an entity id and a state.
        if not json_dict or 'entity_id' not in json_dict \
                or 'state' not in json_dict:
            return None

        last_changed = json_dict.get('last_changed')
        if isinstance(last_changed, str):
            last_changed = dt_util.parse_datetime(last_changed)

        last_updated = json_dict.get('last_updated')
        if isinstance(last_updated, str):
            last_updated = dt_util.parse_datetime(last_updated)

        context = json_dict.get('context')
        if context:
            context = Context(id=context.get('id'),
                              user_id=context.get('user_id'))

        return cls(json_dict['entity_id'], json_dict['state'],
                   json_dict.get('attributes'), last_changed, last_updated,
                   context)
示例#4
0
    async def async_load(self):
        """Load the users."""
        data = await self._store.async_load()

        # Make sure that we're not overriding data if 2 loads happened at the
        # same time
        if self._users is not None:
            return

        # First run: nothing stored yet, start with empty registries.
        if data is None:
            self._users = {}
            self._clients = {}
            return

        users = {
            user_dict['id']: User(**user_dict) for user_dict in data['users']
        }

        # Re-attach each stored credential to its owning user.
        for cred_dict in data['credentials']:
            users[cred_dict['user_id']].credentials.append(Credentials(
                id=cred_dict['id'],
                is_new=False,
                auth_provider_type=cred_dict['auth_provider_type'],
                auth_provider_id=cred_dict['auth_provider_id'],
                data=cred_dict['data'],
            ))

        # Index refresh tokens by id so access tokens can link back to them.
        refresh_tokens = {}

        for rt_dict in data['refresh_tokens']:
            token = RefreshToken(
                id=rt_dict['id'],
                user=users[rt_dict['user_id']],
                client_id=rt_dict['client_id'],
                created_at=dt_util.parse_datetime(rt_dict['created_at']),
                access_token_expiration=timedelta(
                    seconds=rt_dict['access_token_expiration']),
                token=rt_dict['token'],
            )
            refresh_tokens[token.id] = token
            users[rt_dict['user_id']].refresh_tokens[token.token] = token

        # Each access token hangs off the refresh token that issued it.
        for ac_dict in data['access_tokens']:
            refresh_token = refresh_tokens[ac_dict['refresh_token_id']]
            token = AccessToken(
                refresh_token=refresh_token,
                created_at=dt_util.parse_datetime(ac_dict['created_at']),
                token=ac_dict['token'],
            )
            refresh_token.access_tokens.append(token)

        clients = {
            cl_dict['id']: Client(**cl_dict) for cl_dict in data['clients']
        }

        # Publish both registries only after everything loaded successfully.
        self._users = users
        self._clients = clients
示例#5
0
    def test_parse_datetime_converts_correctly(self):
        """Test parse_datetime converts strings."""
        expected = datetime(1986, 7, 9, 12, 0, 0, tzinfo=dt_util.UTC)
        assert dt_util.parse_datetime("1986-07-09T12:00:00Z") == expected

        # Round-trip: formatting "now" and parsing it back is lossless.
        utcnow = dt_util.utcnow()
        assert dt_util.parse_datetime(utcnow.isoformat()) == utcnow
示例#6
0
    def update_period(self):
        """Parse the templates and store a datetime tuple in _period.

        Each endpoint may render to an ISO datetime string or a Unix
        timestamp; a missing endpoint is derived from the other using the
        configured duration. Returns early (leaving _period untouched) on
        any template or parsing error.
        """
        start = None
        end = None

        # Parse start
        if self._start is not None:
            try:
                start_rendered = self._start.render()
            except (TemplateError, TypeError) as ex:
                HistoryStatsHelper.handle_template_exception(ex, 'start')
                return
            start = dt_util.parse_datetime(start_rendered)
            if start is None:
                # Not an ISO datetime; fall back to interpreting the
                # rendered value as a Unix timestamp.
                try:
                    start = dt_util.as_local(dt_util.utc_from_timestamp(
                        math.floor(float(start_rendered))))
                except ValueError:
                    # Bug fix: the fragments previously concatenated to
                    # "datetimeor a timestamp" (missing separating space).
                    _LOGGER.error("Parsing error: start must be a datetime "
                                  "or a timestamp")
                    return

        # Parse end
        if self._end is not None:
            try:
                end_rendered = self._end.render()
            except (TemplateError, TypeError) as ex:
                HistoryStatsHelper.handle_template_exception(ex, 'end')
                return
            end = dt_util.parse_datetime(end_rendered)
            if end is None:
                # Same timestamp fallback as for start.
                try:
                    end = dt_util.as_local(dt_util.utc_from_timestamp(
                        math.floor(float(end_rendered))))
                except ValueError:
                    _LOGGER.error("Parsing error: end must be a datetime "
                                  "or a timestamp")
                    return

        # Calculate start or end using the duration
        if start is None:
            start = end - self._duration
        if end is None:
            end = start + self._duration

        if start > dt_util.now():
            # History hasn't been written yet for this period
            return
        if dt_util.now() < end:
            # No point in making stats of the future
            end = dt_util.now()

        self._period = start, end
示例#7
0
 async def async_update(self):
     """Update the state."""
     data = self.openuv.data[DATA_PROTECTION_WINDOW]['result']
     if self._sensor_type != TYPE_PROTECTION_WINDOW:
         return

     window_start = parse_datetime(data['from_time'])
     window_end = parse_datetime(data['to_time'])

     # The sensor is "on" while the current moment lies inside the window.
     self._state = window_start <= utcnow() <= window_end
     self._attrs.update({
         ATTR_PROTECTION_WINDOW_ENDING_TIME: as_local(window_end),
         ATTR_PROTECTION_WINDOW_ENDING_UV: data['to_uv'],
         ATTR_PROTECTION_WINDOW_STARTING_UV: data['from_uv'],
         ATTR_PROTECTION_WINDOW_STARTING_TIME: as_local(window_start),
     })
示例#8
0
 async def get(self, request, entity_id):
     """Return calendar events.

     Responds 400 when the entity is unknown, either query parameter is
     missing, or either date fails to parse.
     """
     entity = self.component.get_entity(entity_id)
     start = request.query.get('start')
     end = request.query.get('end')
     if None in (start, end, entity):
         return web.Response(status=400)
     try:
         start_date = dt.parse_datetime(start)
         end_date = dt.parse_datetime(end)
     except (ValueError, AttributeError):
         return web.Response(status=400)
     # Bug fix: dt.parse_datetime signals bad input by returning None
     # rather than raising, so the except clause above never catches a
     # malformed date; reject None results explicitly.
     if start_date is None or end_date is None:
         return web.Response(status=400)
     event_list = await entity.async_get_events(
         request.app['hass'], start_date, end_date)
     return self.json(event_list)
示例#9
0
 async def async_update(self):
     """Update the state."""
     data = self.openuv.data[DATA_UV]['result']

     if self._sensor_type == TYPE_CURRENT_OZONE_LEVEL:
         self._state = data['ozone']
     elif self._sensor_type == TYPE_CURRENT_UV_INDEX:
         self._state = data['uv']
     elif self._sensor_type == TYPE_CURRENT_UV_LEVEL:
         # Map the numeric UV index onto its exposure category, checking
         # thresholds from most to least severe.
         for threshold, level in ((11, UV_LEVEL_EXTREME),
                                  (8, UV_LEVEL_VHIGH),
                                  (6, UV_LEVEL_HIGH),
                                  (3, UV_LEVEL_MODERATE)):
             if data['uv'] >= threshold:
                 self._state = level
                 break
         else:
             self._state = UV_LEVEL_LOW
     elif self._sensor_type == TYPE_MAX_UV_INDEX:
         self._state = data['uv_max']
         max_uv_time = parse_datetime(data['uv_max_time'])
         self._attrs.update({ATTR_MAX_UV_TIME: as_local(max_uv_time)})
     elif self._sensor_type in (TYPE_SAFE_EXPOSURE_TIME_1,
                                TYPE_SAFE_EXPOSURE_TIME_2,
                                TYPE_SAFE_EXPOSURE_TIME_3,
                                TYPE_SAFE_EXPOSURE_TIME_4,
                                TYPE_SAFE_EXPOSURE_TIME_5,
                                TYPE_SAFE_EXPOSURE_TIME_6):
         exposure_type = EXPOSURE_TYPE_MAP[self._sensor_type]
         self._state = data['safe_exposure_time'][exposure_type]
示例#10
0
 def _update_callback(self):
     """Refresh the sensor value from the backing device."""
     if not self.is_updated:
         return
     _LOGGER.debug('Update sensor value for %s', self)
     self._value = self.device.sensor(self.device_class)
     self._updated = parse_datetime(self.device.last_update)
     self.async_schedule_update_ha_state()
示例#11
0
    def test_state_change(self):
        """Test if the state changes at next setting/rising."""
        # Freeze "now" so the sun component computes deterministic times.
        now = datetime(2016, 6, 1, 8, 0, 0, tzinfo=dt_util.UTC)
        with patch('homeassistant.helpers.condition.dt_util.utcnow',
                   return_value=now):
            setup_component(self.hass, sun.DOMAIN, {
                sun.DOMAIN: {sun.CONF_ELEVATION: 0}})

        self.hass.block_till_done()

        # The component exposes the next rising time as a state attribute.
        test_time = dt_util.parse_datetime(
            self.hass.states.get(sun.ENTITY_ID)
            .attributes[sun.STATE_ATTR_NEXT_RISING])
        self.assertIsNotNone(test_time)

        self.assertEqual(sun.STATE_BELOW_HORIZON,
                         self.hass.states.get(sun.ENTITY_ID).state)

        # Advance time to just past the predicted rising and verify the
        # state flips above the horizon.
        self.hass.bus.fire(ha.EVENT_TIME_CHANGED,
                           {ha.ATTR_NOW: test_time + timedelta(seconds=5)})

        self.hass.block_till_done()

        self.assertEqual(sun.STATE_ABOVE_HORIZON,
                         self.hass.states.get(sun.ENTITY_ID).state)
示例#12
0
 def _get_date(date):
     """Get the dateTime from date or dateTime as a local."""
     if 'date' not in date:
         return dt.parse_datetime(date['dateTime'])
     # All-day entries carry only a date; anchor them at midnight UTC.
     day = dt.parse_date(date['date'])
     return dt.as_utc(dt.dt.datetime.combine(day, dt.dt.time()))
示例#13
0
    async def get(self, request, datetime=None):
        """Retrieve logbook entries.

        datetime: optional ISO 8601 string marking the first day of the
        window; defaults to the start of the current local day. The
        `period` query parameter selects how many days to cover.
        """
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            # parse_datetime returns None on malformed input (no exception).
            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        period = request.query.get('period')
        if period is None:
            period = 1
        else:
            # Bug fix: a non-numeric period previously raised ValueError
            # and surfaced as a 500 error; reject it explicitly instead.
            try:
                period = int(period)
            except ValueError:
                return self.json_message('Invalid period', HTTP_BAD_REQUEST)

        entity_id = request.query.get('entity')
        # The window spans `period` days ending one day past the start day.
        start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1)
        end_day = start_day + timedelta(days=period)
        hass = request.app['hass']

        def json_events():
            """Fetch events and generate JSON."""
            return self.json(list(
                _get_events(hass, self.config, start_day, end_day, entity_id)))

        # Event extraction hits the database, so run it off the event loop.
        return await hass.async_add_job(json_events)
示例#14
0
 def _update_current_price(self):
     """Recompute the current electricity price and today's statistics.

     Returns True when a price entry covering the current hour was found.
     """
     state = None
     max_price = 0
     min_price = 10000
     sum_price = 0
     num = 0
     now = dt_util.now()
     for key, price_total in self._tibber_home.price_total.items():
         price_time = dt_util.as_local(dt_util.parse_datetime(key))
         price_total = round(price_total, 3)
         time_diff = (now - price_time).total_seconds()/60
         # Track the newest timestamp seen so staleness can be detected.
         if (not self._last_data_timestamp or
                 price_time > self._last_data_timestamp):
             self._last_data_timestamp = price_time
         # An entry is "current" when it started within the last hour.
         if 0 <= time_diff < 60:
             state = price_total
             self._last_updated = price_time
         # Aggregate statistics over today's entries only.
         if now.date() == price_time.date():
             max_price = max(max_price, price_total)
             min_price = min(min_price, price_total)
             num += 1
             sum_price += price_total
     self._state = state
     self._device_state_attributes['max_price'] = max_price
     # Bug fix: guard the division — with no entries on today's date,
     # num is 0 and this previously raised ZeroDivisionError.
     self._device_state_attributes['avg_price'] = (
         round(sum_price / num, 3) if num else None)
     self._device_state_attributes['min_price'] = min_price
     return state is not None
示例#15
0
    async def async_update(self):
        """Get the latest data and updates the states."""
        now = dt_util.utcnow()
        # Skip entirely if the cached price still covers the current hour.
        if self._tibber_home.current_price_total and self._last_updated and \
           dt_util.as_utc(dt_util.parse_datetime(self._last_updated)).hour\
           == now.hour:
            return

        def _find_current_price():
            """Set state from the cached entry covering the current hour.

            Returns True when such an entry was found.
            """
            for key, price_total in self._tibber_home.price_total.items():
                price_time = dt_util.as_utc(dt_util.parse_datetime(key))
                time_diff = (now - price_time).total_seconds()/60
                # Each entry applies for 60 minutes from its start time.
                if time_diff >= 0 and time_diff < 60:
                    self._state = round(price_total, 2)
                    self._last_updated = key
                    return True
            return False

        if _find_current_price():
            return

        # Cache miss: refresh home and price info from the API, then retry.
        _LOGGER.debug("No cached data found, so asking for new data")
        await self._tibber_home.update_info()
        await self._tibber_home.update_price_info()
        data = self._tibber_home.info['viewer']['home']
        self._device_state_attributes['app_nickname'] = data['appNickname']
        self._device_state_attributes['grid_company'] =\
            data['meteringPointData']['gridCompany']
        self._device_state_attributes['estimated_annual_consumption'] =\
            data['meteringPointData']['estimatedAnnualConsumption']
        _find_current_price()
示例#16
0
    def handle_new_task(call):
        """Call when a user creates a new Todoist Task from HASS."""
        project_name = call.data[PROJECT_NAME]
        project_id = project_id_lookup[project_name]

        # Create the task
        item = api.items.add(call.data[CONTENT], project_id)

        if LABELS in call.data:
            # Resolve label names (case-insensitively) to Todoist ids.
            label_ids = [label_id_lookup[name.lower()]
                         for name in call.data[LABELS]]
            item.update(labels=label_ids)

        if PRIORITY in call.data:
            item.update(priority=call.data[PRIORITY])

        if DUE_DATE in call.data:
            due_date = dt.parse_datetime(call.data[DUE_DATE])
            if due_date is None:
                # Not a full datetime; treat it as a bare date at midnight.
                due = dt.parse_date(call.data[DUE_DATE])
                due_date = datetime(due.year, due.month, due.day)
            # Format it in the manner Todoist expects
            due_date = dt.as_utc(due_date)
            due_date = due_date.strftime('%Y-%m-%dT%H:%M')
            item.update(due_date_utc=due_date)
        # Commit changes
        api.commit()
        _LOGGER.debug("Created Todoist task: %s", call.data[CONTENT])
示例#17
0
    def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        # NOTE(review): pre-async/await coroutine (yield from); presumably
        # decorated with @asyncio.coroutine outside this view — confirm.
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            # parse_datetime returns None on malformed input (no exception).
            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        # The logbook always covers exactly one day from the start.
        start_day = dt_util.as_utc(datetime)
        end_day = start_day + timedelta(days=1)

        def get_results():
            """Query DB for results."""
            events = recorder.get_model('Events')
            query = recorder.query('Events').order_by(
                events.time_fired).filter(
                    (events.time_fired > start_day) &
                    (events.time_fired < end_day))
            events = recorder.execute(query)
            return _exclude_events(events, self.config)

        # The DB query is blocking, so run it in the default executor.
        events = yield from request.app['hass'].loop.run_in_executor(
            None, get_results)

        return self.json(humanify(events))
示例#18
0
 def _get_date(date):
     """Get the dateTime from date or dateTime as a local."""
     if 'date' not in date:
         return dt.as_local(dt.parse_datetime(date['dateTime']))
     # All-day entries carry only a date; anchor them at local midnight.
     day_start = dt.dt.datetime.combine(
         dt.parse_date(date['date']), dt.dt.time.min)
     return dt.start_of_local_day(day_start)
示例#19
0
    def get(self, request, datetime=None):
        """Return history over a period of time."""
        # NOTE(review): pre-async/await coroutine (yield from); presumably
        # decorated with @asyncio.coroutine outside this view — confirm.
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            # parse_datetime returns None on malformed input (no exception).
            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)

        now = dt_util.utcnow()

        one_day = timedelta(days=1)

        if datetime:
            start_time = dt_util.as_utc(datetime)
        else:
            # Default to the last 24 hours.
            start_time = now - one_day

        # A start in the future can have no recorded history yet.
        if start_time > now:
            return self.json([])

        end_time = start_time + one_day
        entity_id = request.GET.get('filter_entity_id')

        # The DB query is blocking, so run it in the default executor.
        result = yield from request.app['hass'].loop.run_in_executor(
            None, get_significant_states, start_time, end_time, entity_id,
            self.filters)

        return self.json(result.values())
示例#20
0
    def update_period(self):
        """Parse the templates and store a datetime tuple in _period."""
        start = None
        end = None

        # Parse start
        if self._start is not None:
            try:
                start_rendered = self._start.render()
            except (TemplateError, TypeError) as ex:
                HistoryStatsHelper.handle_template_exception(ex, 'start')
                return
            start = dt_util.parse_datetime(start_rendered)
            if start is None:
                # Not an ISO datetime; fall back to interpreting the
                # rendered value as a Unix timestamp.
                try:
                    start = dt_util.as_local(dt_util.utc_from_timestamp(
                        math.floor(float(start_rendered))))
                except ValueError:
                    _LOGGER.error('PARSING ERROR: start must be a datetime'
                                  ' or a timestamp.')
                    return

        # Parse end
        if self._end is not None:
            try:
                end_rendered = self._end.render()
            except (TemplateError, TypeError) as ex:
                HistoryStatsHelper.handle_template_exception(ex, 'end')
                return
            end = dt_util.parse_datetime(end_rendered)
            if end is None:
                # Same timestamp fallback as for start.
                try:
                    end = dt_util.as_local(dt_util.utc_from_timestamp(
                        math.floor(float(end_rendered))))
                except ValueError:
                    _LOGGER.error('PARSING ERROR: end must be a datetime'
                                  ' or a timestamp.')
                    return

        # Calculate start or end using the duration
        # NOTE(review): if both templates are unset, start and end are both
        # None here and the subtraction raises TypeError; presumably config
        # validation guarantees at least one is set — confirm upstream.
        if start is None:
            start = end - self._duration
        if end is None:
            end = start + self._duration

        self._period = start, end
示例#21
0
        def to_python(self, value):
            """Validate and convert date."""
            parsed = dt_util.parse_datetime(value)
            if parsed is not None:
                return parsed
            # parse_datetime signals failure by returning None.
            raise ValidationError()
示例#22
0
 async def _update_callback(self):
     """Update the value of the sensor."""
     if not self.is_updated:
         return
     _LOGGER.debug("Update sensor value for %s", self)
     # The device read is blocking, so run it in the executor pool.
     self._value = await self.hass.async_add_executor_job(
         self.device.sensor, self.device_class)
     self._updated = parse_datetime(self.device.last_update)
     self.async_schedule_update_ha_state()
示例#23
0
async def test_awair_misc_attributes(hass):
    """Test that desired attributes are set."""
    await setup_awair(hass)

    attributes = hass.states.get("sensor.awair_co2").attributes
    # The sensor should expose when the Awair API last reported data,
    # parsed from the fixture's timestamp string.
    assert attributes[ATTR_LAST_API_UPDATE] == parse_datetime(
        AIR_DATA_FIXTURE[0][ATTR_TIMESTAMP]
    )
示例#24
0
 def _find_current_price():
     """Set state from the cached entry covering the current hour."""
     for key, price_total in self._tibber_home.price_total.items():
         price_time = dt_util.as_utc(dt_util.parse_datetime(key))
         minutes_ago = (now - price_time).total_seconds() / 60
         # Each entry applies for 60 minutes from its start time.
         if 0 <= minutes_ago < 60:
             self._state = round(price_total, 2)
             self._last_updated = key
             return True
     return False
示例#25
0
    def is_token_valid(self):
        """Check if a token is already loaded and if it is still valid."""
        if not self._prefs[STORAGE_ACCESS_TOKEN]:
            return False

        expires_at = dt.parse_datetime(self._prefs[STORAGE_EXPIRE_TIME])
        # Treat the token as expired slightly early so it gets refreshed
        # before it actually lapses mid-request.
        safety_margin = timedelta(seconds=PREEMPTIVE_REFRESH_TTL_IN_SECONDS)
        return dt.utcnow() < expires_at - safety_margin
示例#26
0
    def get(self, request, datetime=None):
        """Return history over a period of time."""
        # NOTE(review): pre-async/await coroutine (yield from); presumably
        # decorated with @asyncio.coroutine outside this view — confirm.
        # Used to report how long extraction took when debug logging is on.
        timer_start = time.perf_counter()
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            # parse_datetime returns None on malformed input (no exception).
            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)

        now = dt_util.utcnow()

        one_day = timedelta(days=1)
        if datetime:
            start_time = dt_util.as_utc(datetime)
        else:
            # Default to the last 24 hours.
            start_time = now - one_day

        # A start in the future can have no recorded history yet.
        if start_time > now:
            return self.json([])

        end_time = request.query.get('end_time')
        if end_time:
            end_time = dt_util.parse_datetime(end_time)
            if end_time:
                end_time = dt_util.as_utc(end_time)
            else:
                return self.json_message('Invalid end_time', HTTP_BAD_REQUEST)
        else:
            # Default window is exactly one day from the start time.
            end_time = start_time + one_day
        entity_ids = request.query.get('filter_entity_id')
        if entity_ids:
            entity_ids = entity_ids.lower().split(',')
        include_start_time_state = 'skip_initial_state' not in request.query

        # The database query is blocking, so run it via async_add_job.
        result = yield from request.app['hass'].async_add_job(
            get_significant_states, request.app['hass'], start_time, end_time,
            entity_ids, self.filters, include_start_time_state)
        result = result.values()
        if _LOGGER.isEnabledFor(logging.DEBUG):
            elapsed = time.perf_counter() - timer_start
            _LOGGER.debug(
                'Extracted %d states in %fs', sum(map(len, result)), elapsed)
        return self.json(result)
示例#27
0
    def update(self):
        """Get the latest data from yr.no and updates the states."""
        now = dt_util.utcnow()
        # Check if data should be updated
        if self._update is not None and now <= self._update:
            return

        self._weather.update()

        # Attribute key carrying the value for each straightforward type.
        attr_for_type = {
            'temperature': '@value', 'pressure': '@value',
            'humidity': '@value', 'dewpointTemperature': '@value',
            'windSpeed': '@mps', 'windGust': '@mps',
            'fog': '@percent', 'cloudiness': '@percent',
            'lowClouds': '@percent', 'mediumClouds': '@percent',
            'highClouds': '@percent',
        }

        # Find the first forecast interval that carries this sensor's type.
        for time_entry in self._weather.data['product']['time']:
            valid_from = dt_util.parse_datetime(time_entry['@from'])
            valid_to = dt_util.parse_datetime(time_entry['@to'])

            loc_data = time_entry['location']

            if self.type not in loc_data or now >= valid_to:
                continue

            self._update = valid_to

            if self.type in ('precipitation', 'symbol'):
                # These two only apply once their interval has started.
                if valid_from < now:
                    key = ('@value' if self.type == 'precipitation'
                           else '@number')
                    self._state = loc_data[self.type][key]
                    break
            elif self.type == 'windDirection':
                self._state = float(loc_data[self.type]['@deg'])
                break
            elif self.type in attr_for_type:
                self._state = loc_data[self.type][attr_for_type[self.type]]
                break
示例#28
0
    def from_dict(cls, json_dict):
        """Initialize a state from a dict.

        Ensures: state == State.from_json_dict(state.to_json_dict())
        """
        # A usable payload must at minimum carry an entity id and a state.
        if not json_dict or 'entity_id' not in json_dict \
                or 'state' not in json_dict:
            return None

        last_changed = json_dict.get('last_changed')
        if isinstance(last_changed, str):
            last_changed = dt_util.parse_datetime(last_changed)

        last_updated = json_dict.get('last_updated')
        if isinstance(last_updated, str):
            last_updated = dt_util.parse_datetime(last_updated)

        return cls(json_dict['entity_id'], json_dict['state'],
                   json_dict.get('attributes'), last_changed, last_updated)
示例#29
0
    def from_dict(cls, json_dict):
        """Initialize a state from a dict.

        Async friendly.

        Ensures: state == State.from_json_dict(state.to_json_dict())
        """
        # A usable payload must at minimum carry an entity id and a state.
        if not json_dict:
            return None
        if "entity_id" not in json_dict or "state" not in json_dict:
            return None

        last_changed = json_dict.get("last_changed")
        if isinstance(last_changed, str):
            last_changed = dt_util.parse_datetime(last_changed)

        last_updated = json_dict.get("last_updated")
        if isinstance(last_updated, str):
            last_updated = dt_util.parse_datetime(last_updated)

        return cls(
            json_dict["entity_id"],
            json_dict["state"],
            json_dict.get("attributes"),
            last_changed,
            last_updated,
        )
示例#30
0
def next_rising_utc(hass, entity_id=None):
    """UTC datetime object of the next sun rising.

    entity_id: sun entity to query; defaults to the standard sun entity.
    Returns None when the entity does not exist or has no next-rising
    attribute.
    """
    entity_id = entity_id or ENTITY_ID

    # Bug fix: this previously looked up the hard-coded ENTITY_ID,
    # silently ignoring the entity_id argument computed above.
    state = hass.states.get(entity_id)

    try:
        return dt_util.parse_datetime(state.attributes[STATE_ATTR_NEXT_RISING])
    except (AttributeError, KeyError):
        # AttributeError if state is None
        # KeyError if STATE_ATTR_NEXT_RISING does not exist
        return None
示例#31
0
    async def async_added_to_hass(self) -> None:
        """Startup with initial state or previous state."""
        await super().async_added_to_hass()

        # Restore the on/off flag and the last trigger time from the
        # previous run when a saved state exists.
        state = await self.async_get_last_state()
        if state:
            enable_automation = state.state == STATE_ON
            last_triggered = state.attributes.get("last_triggered")
            if last_triggered is not None:
                self._last_triggered = parse_datetime(last_triggered)
            _LOGGER.debug(
                "Loaded automation %s with state %s from state "
                " storage last state %s",
                self.entity_id,
                enable_automation,
                state,
            )
        else:
            # No saved state: fall back to the component-wide default.
            enable_automation = DEFAULT_INITIAL_STATE
            _LOGGER.debug(
                "Automation %s not in state storage, state %s from default is used",
                self.entity_id,
                enable_automation,
            )

        # An explicit initial_state in configuration overrides both the
        # restored state and the default.
        if self._initial_state is not None:
            enable_automation = self._initial_state
            _LOGGER.debug(
                "Automation %s initial state %s overridden from "
                "config initial_state",
                self.entity_id,
                enable_automation,
            )

        if enable_automation:
            await self.async_enable()
示例#32
0
def get_next_departure(
    schedule: Any,
    start_station_id: Any,
    end_station_id: Any,
    offset: cv.time_period,
    include_tomorrow: bool = False,
) -> dict:
    """Get the next departure for the given schedule.

    Queries the GTFS SQL schedule for trips between the two stations and
    returns a dict describing the first departure after now+offset, or an
    empty dict when none is found.

    # NOTE(review): `schedule` is presumably a pygtfs Schedule (it exposes
    # `.engine.execute`) — confirm against the caller.
    """
    # Work in naive local time shifted by the configured offset; the GTFS
    # tables store naive date/time strings.
    now = dt_util.now().replace(tzinfo=None) + offset
    now_date = now.strftime(dt_util.DATE_STR_FORMAT)
    yesterday = now - datetime.timedelta(days=1)
    yesterday_date = yesterday.strftime(dt_util.DATE_STR_FORMAT)
    tomorrow = now + datetime.timedelta(days=1)
    tomorrow_date = tomorrow.strftime(dt_util.DATE_STR_FORMAT)

    # Fetch all departures for yesterday, today and optionally tomorrow,
    # up to an overkill maximum in case of a departure every minute for those
    # days.
    limit = 24 * 60 * 60 * 2
    # The extra SELECT/WHERE/ORDER fragments are only spliced in when
    # tomorrow's departures are wanted; otherwise they stay empty strings.
    tomorrow_select = tomorrow_where = tomorrow_order = ""
    if include_tomorrow:
        limit = int(limit / 2 * 3)
        tomorrow_name = tomorrow.strftime("%A").lower()
        tomorrow_select = f"calendar.{tomorrow_name} AS tomorrow,"
        tomorrow_where = f"OR calendar.{tomorrow_name} = 1"
        tomorrow_order = f"calendar.{tomorrow_name} DESC,"

    # The weekday column names (monday..sunday) come from strftime, so the
    # f-string interpolation below cannot inject arbitrary SQL; station ids
    # and dates are passed as bound parameters.
    sql_query = f"""
        SELECT trip.trip_id, trip.route_id,
               time(origin_stop_time.arrival_time) AS origin_arrival_time,
               time(origin_stop_time.departure_time) AS origin_depart_time,
               date(origin_stop_time.departure_time) AS origin_depart_date,
               origin_stop_time.drop_off_type AS origin_drop_off_type,
               origin_stop_time.pickup_type AS origin_pickup_type,
               origin_stop_time.shape_dist_traveled AS origin_dist_traveled,
               origin_stop_time.stop_headsign AS origin_stop_headsign,
               origin_stop_time.stop_sequence AS origin_stop_sequence,
               origin_stop_time.timepoint AS origin_stop_timepoint,
               time(destination_stop_time.arrival_time) AS dest_arrival_time,
               time(destination_stop_time.departure_time) AS dest_depart_time,
               destination_stop_time.drop_off_type AS dest_drop_off_type,
               destination_stop_time.pickup_type AS dest_pickup_type,
               destination_stop_time.shape_dist_traveled AS dest_dist_traveled,
               destination_stop_time.stop_headsign AS dest_stop_headsign,
               destination_stop_time.stop_sequence AS dest_stop_sequence,
               destination_stop_time.timepoint AS dest_stop_timepoint,
               calendar.{yesterday.strftime("%A").lower()} AS yesterday,
               calendar.{now.strftime("%A").lower()} AS today,
               {tomorrow_select}
               calendar.start_date AS start_date,
               calendar.end_date AS end_date
        FROM trips trip
        INNER JOIN calendar calendar
                   ON trip.service_id = calendar.service_id
        INNER JOIN stop_times origin_stop_time
                   ON trip.trip_id = origin_stop_time.trip_id
        INNER JOIN stops start_station
                   ON origin_stop_time.stop_id = start_station.stop_id
        INNER JOIN stop_times destination_stop_time
                   ON trip.trip_id = destination_stop_time.trip_id
        INNER JOIN stops end_station
                   ON destination_stop_time.stop_id = end_station.stop_id
        WHERE (calendar.{yesterday.strftime("%A").lower()} = 1
               OR calendar.{now.strftime("%A").lower()} = 1
               {tomorrow_where}
               )
        AND start_station.stop_id = :origin_station_id
                   AND end_station.stop_id = :end_station_id
        AND origin_stop_sequence < dest_stop_sequence
        AND calendar.start_date <= :today
        AND calendar.end_date >= :today
        ORDER BY calendar.{yesterday.strftime("%A").lower()} DESC,
                 calendar.{now.strftime("%A").lower()} DESC,
                 {tomorrow_order}
                 origin_stop_time.departure_time
        LIMIT :limit
        """
    result = schedule.engine.execute(
        text(sql_query),
        origin_station_id=start_station_id,
        end_station_id=end_station_id,
        today=now_date,
        limit=limit,
    )

    # Create lookup timetable for today and possibly tomorrow, taking into
    # account any departures from yesterday scheduled after midnight,
    # as long as all departures are within the calendar date range.
    timetable = {}
    yesterday_start = today_start = tomorrow_start = None
    yesterday_last = today_last = ""

    for row in result:
        # Yesterday's services only matter for departures whose GTFS time
        # rolls past midnight (depart date differs from the first seen one).
        if row["yesterday"] == 1 and yesterday_date >= row["start_date"]:
            extras = {"day": "yesterday", "first": None, "last": False}
            if yesterday_start is None:
                yesterday_start = row["origin_depart_date"]
            if yesterday_start != row["origin_depart_date"]:
                idx = f"{now_date} {row['origin_depart_time']}"
                timetable[idx] = {**row, **extras}
                yesterday_last = idx

        if row["today"] == 1:
            extras = {"day": "today", "first": False, "last": False}
            if today_start is None:
                today_start = row["origin_depart_date"]
                extras["first"] = True
            # Departures after midnight belong to tomorrow's date prefix.
            if today_start == row["origin_depart_date"]:
                idx_prefix = now_date
            else:
                idx_prefix = tomorrow_date
            idx = f"{idx_prefix} {row['origin_depart_time']}"
            timetable[idx] = {**row, **extras}
            today_last = idx

        # "tomorrow" key only exists when include_tomorrow added the column.
        if ("tomorrow" in row and row["tomorrow"] == 1
                and tomorrow_date <= row["end_date"]):
            extras = {"day": "tomorrow", "first": False, "last": None}
            if tomorrow_start is None:
                tomorrow_start = row["origin_depart_date"]
                extras["first"] = True
            if tomorrow_start == row["origin_depart_date"]:
                idx = f"{tomorrow_date} {row['origin_depart_time']}"
                timetable[idx] = {**row, **extras}

    # Flag last departures.
    for idx in filter(None, [yesterday_last, today_last]):
        timetable[idx]["last"] = True

    _LOGGER.debug("Timetable: %s", sorted(timetable.keys()))

    # Pick the earliest timetable entry strictly after "now".
    item = {}
    for key in sorted(timetable.keys()):
        if dt_util.parse_datetime(key) > now:
            item = timetable[key]
            _LOGGER.debug("Departure found for station %s @ %s -> %s",
                          start_station_id, key, item)
            break

    if item == {}:
        return {}

    # Format arrival and departure dates and times, accounting for the
    # possibility of times crossing over midnight.
    origin_arrival = now
    if item["origin_arrival_time"] > item["origin_depart_time"]:
        origin_arrival -= datetime.timedelta(days=1)
    origin_arrival_time = (
        f"{origin_arrival.strftime(dt_util.DATE_STR_FORMAT)} "
        f"{item['origin_arrival_time']}")

    origin_depart_time = f"{now_date} {item['origin_depart_time']}"

    dest_arrival = now
    if item["dest_arrival_time"] < item["origin_depart_time"]:
        dest_arrival += datetime.timedelta(days=1)
    dest_arrival_time = (f"{dest_arrival.strftime(dt_util.DATE_STR_FORMAT)} "
                         f"{item['dest_arrival_time']}")

    dest_depart = dest_arrival
    if item["dest_depart_time"] < item["dest_arrival_time"]:
        dest_depart += datetime.timedelta(days=1)
    dest_depart_time = (f"{dest_depart.strftime(dt_util.DATE_STR_FORMAT)} "
                        f"{item['dest_depart_time']}")

    depart_time = dt_util.parse_datetime(origin_depart_time)
    arrival_time = dt_util.parse_datetime(dest_arrival_time)

    origin_stop_time = {
        "Arrival Time": origin_arrival_time,
        "Departure Time": origin_depart_time,
        "Drop Off Type": item["origin_drop_off_type"],
        "Pickup Type": item["origin_pickup_type"],
        "Shape Dist Traveled": item["origin_dist_traveled"],
        "Headsign": item["origin_stop_headsign"],
        "Sequence": item["origin_stop_sequence"],
        "Timepoint": item["origin_stop_timepoint"],
    }

    destination_stop_time = {
        "Arrival Time": dest_arrival_time,
        "Departure Time": dest_depart_time,
        "Drop Off Type": item["dest_drop_off_type"],
        "Pickup Type": item["dest_pickup_type"],
        "Shape Dist Traveled": item["dest_dist_traveled"],
        "Headsign": item["dest_stop_headsign"],
        "Sequence": item["dest_stop_sequence"],
        "Timepoint": item["dest_stop_timepoint"],
    }

    return {
        "trip_id": item["trip_id"],
        "route_id": item["route_id"],
        "day": item["day"],
        "first": item["first"],
        "last": item["last"],
        "departure_time": depart_time,
        "arrival_time": arrival_time,
        "origin_stop_time": origin_stop_time,
        "destination_stop_time": destination_stop_time,
    }
示例#33
0
            try:
                entity_ids = cv.entity_ids(entity_ids_str)
            except vol.Invalid:
                raise InvalidEntityFormatError(
                    f"Invalid entity id(s) encountered: {entity_ids_str}. "
                    "Format should be <domain>.<object_id>") from vol.Invalid
        else:
            entity_ids = None

        if (end_time_str := request.query.get("end_time")) is None:
            start_day = dt_util.as_utc(datetime_dt) - timedelta(days=period -
                                                                1)
            end_day = start_day + timedelta(days=period)
        else:
            start_day = datetime_dt
            if (end_day_dt := dt_util.parse_datetime(end_time_str)) is None:
                return self.json_message("Invalid end_time",
                                         HTTPStatus.BAD_REQUEST)
            end_day = end_day_dt

        hass = request.app["hass"]

        context_id = request.query.get("context_id")

        if entity_ids and context_id:
            return self.json_message("Can't combine entity with context_id",
                                     HTTPStatus.BAD_REQUEST)

        event_types = async_determine_event_types(hass, entity_ids, None)
        event_processor = EventProcessor(
            hass,
示例#34
0
    def update_entity_trigger(entity_id, new_state=None):
        """Update the entity trigger for the entity_id.

        Tears down any existing listener for the entity, then sets up a new
        one based on the entity's current state:
        - input_datetime with a date -> one-shot point-in-time listener
        - input_datetime with time only -> daily time-change listener
        - timestamp sensor -> one-shot point-in-time listener
        """
        # If a listener was already set up for entity, remove it.
        remove = entities.pop(entity_id, None)
        if remove:
            remove()
            remove = None

        # Entity removed or unavailable: leave no listener registered.
        if not new_state:
            return

        # Check state of entity. If valid, set up a listener.
        if new_state.domain == "input_datetime":
            has_date = new_state.attributes["has_date"]
            if has_date:
                year = new_state.attributes["year"]
                month = new_state.attributes["month"]
                day = new_state.attributes["day"]
            has_time = new_state.attributes["has_time"]
            if has_time:
                hour = new_state.attributes["hour"]
                minute = new_state.attributes["minute"]
                second = new_state.attributes["second"]
            else:
                # If no time then use midnight.
                hour = minute = second = 0

            if has_date:
                # If input_datetime has date, then track point in time.
                trigger_dt = datetime(
                    year,
                    month,
                    day,
                    hour,
                    minute,
                    second,
                    tzinfo=dt_util.DEFAULT_TIME_ZONE,
                )
                # Only set up listener if time is now or in the future.
                if trigger_dt >= dt_util.now():
                    remove = async_track_point_in_time(
                        hass,
                        partial(
                            time_automation_listener,
                            f"time set in {entity_id}",
                            entity_id=entity_id,
                        ),
                        trigger_dt,
                    )
            elif has_time:
                # Else if it has time, then track time change.
                remove = async_track_time_change(
                    hass,
                    partial(
                        time_automation_listener,
                        f"time set in {entity_id}",
                        entity_id=entity_id,
                    ),
                    hour=hour,
                    minute=minute,
                    second=second,
                )
        elif (new_state.domain == "sensor"
              and new_state.attributes.get(ATTR_DEVICE_CLASS)
              == sensor.DEVICE_CLASS_TIMESTAMP
              and new_state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)):
            # Timestamp sensor: parse the state itself as the trigger time.
            trigger_dt = dt_util.parse_datetime(new_state.state)

            # Only future instants get a listener; None means unparsable.
            if trigger_dt is not None and trigger_dt > dt_util.utcnow():
                remove = async_track_point_in_time(
                    hass,
                    partial(
                        time_automation_listener,
                        f"time set in {entity_id}",
                        entity_id=entity_id,
                    ),
                    trigger_dt,
                )

        # Was a listener set up?
        if remove:
            entities[entity_id] = remove
示例#35
0
    def _render_period_datetime(self, template, field):
        """Render *template* and coerce the result to a datetime.

        Accepts either a datetime string or a numeric timestamp (floored to
        whole seconds, converted to local time). Returns ``(value, failed)``:
        on failure the error has already been reported and the caller must
        abort the period update.
        """
        try:
            rendered = template.render()
        except (TemplateError, TypeError) as ex:
            self.handle_template_exception(ex, field)
            return None, True
        value = None
        if isinstance(rendered, str):
            value = dt_util.parse_datetime(rendered)
        if value is None:
            try:
                value = dt_util.as_local(
                    dt_util.utc_from_timestamp(math.floor(float(rendered))))
            except ValueError:
                _LOGGER.error(
                    'Parsing error: field "%s" must be a datetime or a timestamp',
                    field,
                )
                return None, True
        return value, False

    def _update_period(self):
        """Parse the templates and calculate a datetime tuples.

        Sets self._period, self.start and self.end; returns early (leaving
        them untouched) when a template fails or the period is unusable.
        """
        start = end = None
        now = dt_util.now()

        # Parse start
        _LOGGER.debug("Process start template: %s", self._start_template)
        if self._start_template is not None:
            start, failed = self._render_period_datetime(
                self._start_template, "start")
            if failed:
                return

        # Parse end
        _LOGGER.debug("Process end template: %s", self._end_template)
        if self._end_template is not None:
            end, failed = self._render_period_datetime(
                self._end_template, "end")
            if failed:
                return

        # Calculate start or end using the duration
        _LOGGER.debug("Process duration: %s", self._duration)
        if self._duration is not None:
            if start is None:
                if end is None:
                    end = now
                start = end - self._duration
            else:
                end = start + self._duration

        _LOGGER.debug("Start: %s, End: %s", start, end)
        if start is None or end is None:
            return

        if start > now:
            # History hasn't been written yet for this period
            return
        if now < end:
            # No point in making stats of the future
            end = now

        self._period = start, end
        self.start = start.replace(microsecond=0).isoformat()
        self.end = end.replace(microsecond=0).isoformat()
示例#36
0
    def handle_new_task(call: ServiceCall) -> None:
        """Call when a user creates a new Todoist Task from Home Assistant.

        Creates the task in the configured project, then applies optional
        labels, assignee, priority, due date and reminder before committing.
        """

        def _format_due_date(raw: str, field: str) -> str:
            """Parse a datetime (or bare date) string and return it UTC,
            formatted the way Todoist expects.

            Raises ValueError when *raw* is neither a datetime nor a date.
            """
            due_date = dt.parse_datetime(raw)
            if due_date is None:
                due = dt.parse_date(raw)
                if due is None:
                    raise ValueError(f"Invalid {field}: {raw}")
                # Bare date: use midnight of that day.
                due_date = datetime(due.year, due.month, due.day)
            # Format it in the manner Todoist expects
            return dt.as_utc(due_date).strftime("%Y-%m-%dT%H:%M:%S")

        project_name = call.data[PROJECT_NAME]
        project_id = project_id_lookup[project_name]

        # Create the task
        item = api.items.add(call.data[CONTENT], project_id=project_id)

        if LABELS in call.data:
            label_ids = [
                label_id_lookup[label.lower()]
                for label in call.data[LABELS]
            ]
            item.update(labels=label_ids)

        if ASSIGNEE in call.data:
            task_assignee = call.data[ASSIGNEE].lower()
            if task_assignee in collaborator_id_lookup:
                item.update(
                    responsible_uid=collaborator_id_lookup[task_assignee])
            else:
                raise ValueError(
                    f"User is not part of the shared project. user: {task_assignee}"
                )

        if PRIORITY in call.data:
            item.update(priority=call.data[PRIORITY])

        _due: dict = {}
        if DUE_DATE_STRING in call.data:
            _due["string"] = call.data[DUE_DATE_STRING]

        if DUE_DATE_LANG in call.data:
            _due["lang"] = call.data[DUE_DATE_LANG]

        if DUE_DATE in call.data:
            _due["date"] = _format_due_date(call.data[DUE_DATE], "due_date")

        if _due:
            item.update(due=_due)

        _reminder_due: dict = {}
        if REMINDER_DATE_STRING in call.data:
            _reminder_due["string"] = call.data[REMINDER_DATE_STRING]

        if REMINDER_DATE_LANG in call.data:
            _reminder_due["lang"] = call.data[REMINDER_DATE_LANG]

        if REMINDER_DATE in call.data:
            _reminder_due["date"] = _format_due_date(
                call.data[REMINDER_DATE], "reminder_date")

        if _reminder_due:
            api.reminders.add(item["id"], due=_reminder_due)

        # Commit changes
        api.commit()
        _LOGGER.debug("Created Todoist task: %s", call.data[CONTENT])
示例#37
0
async def test_setting_rising(hass, legacy_patchable_time):
    """Test retrieving sun setting and rising.

    Recomputes each upcoming solar event with astral directly and checks the
    sun entity's attributes agree.
    """
    utc_now = datetime(2016, 11, 1, 8, 0, 0, tzinfo=dt_util.UTC)
    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=utc_now):
        await async_setup_component(
            hass, sun.DOMAIN, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}}
        )

    await hass.async_block_till_done()
    state = hass.states.get(sun.ENTITY_ID)

    from astral import Astral

    astral = Astral()
    utc_today = utc_now.date()

    latitude = hass.config.latitude
    longitude = hass.config.longitude

    def next_event(calc, *args):
        """Return the first event from *calc* strictly after utc_now.

        Scans day by day starting from yesterday, matching the event lookup
        the sun component performs.
        """
        mod = -1
        while True:
            event_dt = calc(utc_today + timedelta(days=mod), *args)
            if event_dt > utc_now:
                return event_dt
            mod += 1

    next_dawn = next_event(astral.dawn_utc, latitude, longitude)
    next_dusk = next_event(astral.dusk_utc, latitude, longitude)
    # Solar midnight/noon only depend on longitude.
    next_midnight = next_event(astral.solar_midnight_utc, longitude)
    next_noon = next_event(astral.solar_noon_utc, longitude)
    next_rising = next_event(astral.sunrise_utc, latitude, longitude)
    next_setting = next_event(astral.sunset_utc, latitude, longitude)

    assert next_dawn == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_DAWN]
    )
    assert next_dusk == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_DUSK]
    )
    assert next_midnight == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_MIDNIGHT]
    )
    assert next_noon == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_NOON]
    )
    assert next_rising == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_RISING]
    )
    assert next_setting == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_SETTING]
    )
示例#38
0
def convert_until(status_dict, until_key) -> None:
    """Convert datetime string from "%Y-%m-%dT%H:%M:%SZ" to local/aware/isoformat.

    Mutates *status_dict* in place and returns None; the previous ``-> str``
    annotation was wrong (nothing is returned), and this matches the other
    copy of this helper in the codebase.
    """
    if until_key in status_dict:  # only present for certain modes
        dt_utc_naive = dt_util.parse_datetime(status_dict[until_key])
        status_dict[until_key] = dt_util.as_local(dt_utc_naive).isoformat()
示例#39
0
    async def updating_devices(self, *_):
        """Find the current data from self.data.

        Selects the forecast time entry closest to now + configured forecast
        horizon and pushes new states to all devices whose value changed.
        """
        if not self.data:
            return

        now = dt_util.utcnow()
        forecast_time = now + dt_util.dt.timedelta(hours=self._forecast)

        # Find the correct time entry. Since not all time entries contain all
        # types of data, we cannot just select one. Instead, we order  them by
        # distance from the desired forecast_time, and for every device iterate
        # them in order of increasing distance, taking the first time_point
        # that contains the desired data.

        ordered_entries = []

        for time_entry in self.data['product']['time']:
            valid_from = dt_util.parse_datetime(time_entry['@from'])
            valid_to = dt_util.parse_datetime(time_entry['@to'])

            if now >= valid_to:
                # Has already passed. Never select this.
                continue

            # Sum of distances of the interval endpoints to forecast_time;
            # smaller means the interval is centered closer to the target.
            average_dist = (abs((valid_to - forecast_time).total_seconds()) +
                            abs((valid_from - forecast_time).total_seconds()))

            ordered_entries.append((average_dist, time_entry))

        ordered_entries.sort(key=lambda item: item[0])

        # Update all devices
        tasks = []
        if ordered_entries:
            for dev in self.devices:
                new_state = None

                # First entry (closest in time) containing this device's
                # data type wins.
                for (_, selected_time_entry) in ordered_entries:
                    loc_data = selected_time_entry['location']

                    if dev.type not in loc_data:
                        continue

                    # Each sensor type reads a different XML attribute.
                    if dev.type == 'precipitation':
                        new_state = loc_data[dev.type]['@value']
                    elif dev.type == 'symbol':
                        new_state = loc_data[dev.type]['@number']
                    elif dev.type in ('temperature', 'pressure', 'humidity',
                                      'dewpointTemperature'):
                        new_state = loc_data[dev.type]['@value']
                    elif dev.type in ('windSpeed', 'windGust'):
                        new_state = loc_data[dev.type]['@mps']
                    elif dev.type == 'windDirection':
                        new_state = float(loc_data[dev.type]['@deg'])
                    elif dev.type in ('fog', 'cloudiness', 'lowClouds',
                                      'mediumClouds', 'highClouds'):
                        new_state = loc_data[dev.type]['@percent']

                    break

                # Only schedule a state update when the value changed.
                # pylint: disable=protected-access
                if new_state != dev._state:
                    dev._state = new_state
                    tasks.append(dev.async_update_ha_state())

        if tasks:
            await asyncio.wait(tasks, loop=self.hass.loop)
示例#40
0
    async def _async_load(self) -> None:
        """Load the users.

        Reads users, groups, credentials and refresh tokens from the store,
        applying backward-compatibility defaults for fields added in later
        versions. Populates self._groups and self._users.
        """
        data = await self._store.async_load()

        # Make sure that we're not overriding data if 2 loads happened at the
        # same time
        if self._users is not None:
            return

        if data is None:
            self._set_defaults()
            return

        users = OrderedDict()  # type: Dict[str, models.User]
        groups = OrderedDict()  # type: Dict[str, models.Group]

        # When creating objects we mention each attribute explicetely. This
        # prevents crashing if user rolls back HA version after a new property
        # was added.

        for group_dict in data.get('groups', []):
            groups[group_dict['id']] = models.Group(
                name=group_dict['name'],
                id=group_dict['id'],
            )

        # Pre-groups storage: create an initial group and migrate
        # non-system users into it below.
        migrate_group = None

        if not groups:
            migrate_group = models.Group(name=INITIAL_GROUP_NAME)
            groups[migrate_group.id] = migrate_group

        for user_dict in data['users']:
            users[user_dict['id']] = models.User(
                name=user_dict['name'],
                groups=[groups[group_id] for group_id
                        in user_dict.get('group_ids', [])],
                id=user_dict['id'],
                is_owner=user_dict['is_owner'],
                is_active=user_dict['is_active'],
                system_generated=user_dict['system_generated'],
            )
            if migrate_group is not None and not user_dict['system_generated']:
                users[user_dict['id']].groups = [migrate_group]

        for cred_dict in data['credentials']:
            users[cred_dict['user_id']].credentials.append(models.Credentials(
                id=cred_dict['id'],
                is_new=False,
                auth_provider_type=cred_dict['auth_provider_type'],
                auth_provider_id=cred_dict['auth_provider_id'],
                data=cred_dict['data'],
            ))

        for rt_dict in data['refresh_tokens']:
            # Filter out the old keys that don't have jwt_key (pre-0.76)
            if 'jwt_key' not in rt_dict:
                continue

            created_at = dt_util.parse_datetime(rt_dict['created_at'])
            if created_at is None:
                getLogger(__name__).error(
                    'Ignoring refresh token %(id)s with invalid created_at '
                    '%(created_at)s for user_id %(user_id)s', rt_dict)
                continue

            # Pre-token_type storage: infer system vs normal from client_id.
            token_type = rt_dict.get('token_type')
            if token_type is None:
                if rt_dict['client_id'] is None:
                    token_type = models.TOKEN_TYPE_SYSTEM
                else:
                    token_type = models.TOKEN_TYPE_NORMAL

            # old refresh_token don't have last_used_at (pre-0.78)
            last_used_at_str = rt_dict.get('last_used_at')
            if last_used_at_str:
                last_used_at = dt_util.parse_datetime(last_used_at_str)
            else:
                last_used_at = None

            token = models.RefreshToken(
                id=rt_dict['id'],
                user=users[rt_dict['user_id']],
                client_id=rt_dict['client_id'],
                # use dict.get to keep backward compatibility
                client_name=rt_dict.get('client_name'),
                client_icon=rt_dict.get('client_icon'),
                token_type=token_type,
                created_at=created_at,
                access_token_expiration=timedelta(
                    seconds=rt_dict['access_token_expiration']),
                token=rt_dict['token'],
                jwt_key=rt_dict['jwt_key'],
                last_used_at=last_used_at,
                last_used_ip=rt_dict.get('last_used_ip'),
            )
            users[rt_dict['user_id']].refresh_tokens[token.id] = token

        self._groups = groups
        self._users = users
示例#41
0
def convert_until(status_dict: dict, until_key: str) -> None:
    """Reformat a dt str from "%Y-%m-%dT%H:%M:%SZ" as local/aware/isoformat."""
    # The key is only present for certain modes; otherwise leave untouched.
    if until_key not in status_dict:
        return
    utc_naive = dt_util.parse_datetime(status_dict[until_key])
    status_dict[until_key] = dt_util.as_local(utc_naive).isoformat()
示例#42
0
 def last_update(self):
     """Return the last_update time for the device."""
     # Convert the device's raw timestamp string to a datetime.
     return parse_datetime(self.device.last_update)
示例#43
0
def get_prayer_time_as_dt(prayer_time):
    """Create a datetime object for the respective prayer time."""
    # Anchor the (time-only) prayer time to today's date before parsing.
    today_str = datetime.today().strftime("%Y-%m-%d")
    return dt_util.parse_datetime(f"{today_str} {prayer_time}")
示例#44
0
 def test_parse_datetime_returns_none_for_incorrect_format(self):
     """parse_datetime must yield None for an unparseable string."""
     result = dt_util.parse_datetime("not a datetime string")
     self.assertIsNone(result)
示例#45
0
文件: yr.py 项目: loraxx753/skynet
    def async_update(self, *_):
        """Get the latest data from yr.no.

        Old-style (``yield from``) coroutine: fetches the forecast XML,
        caches it with the server-announced next-run time, then pushes the
        freshest matching values to all tracked devices.
        """
        def try_again(err: str):
            """Retry in 15 minutes."""
            _LOGGER.warning('Retrying in 15 minutes: %s', err)
            self._nextrun = None
            nxt = dt_util.utcnow() + timedelta(minutes=15)
            # NOTE(review): the retry is only scheduled when the target
            # minute is >= 15, so some failures are apparently never
            # retried -- confirm this condition is intentional.
            if nxt.minute >= 15:
                async_track_point_in_utc_time(self.hass, self.async_update,
                                              nxt)

        # Only hit the network when the previously announced '@nextrun'
        # has passed (or we have never fetched).
        if self._nextrun is None or dt_util.utcnow() >= self._nextrun:
            resp = None
            try:
                websession = async_get_clientsession(self.hass)
                with async_timeout.timeout(10, loop=self.hass.loop):
                    resp = yield from websession.get(self._url,
                                                     params=self._urlparams)
                if resp.status != 200:
                    try_again('{} returned {}'.format(resp.url, resp.status))
                    return
                text = yield from resp.text()

            except (asyncio.TimeoutError, aiohttp.errors.ClientError,
                    aiohttp.errors.ClientDisconnectedError) as err:
                try_again(err)
                return

            finally:
                # Always release the connection back to the pool.
                if resp is not None:
                    self.hass.async_add_job(resp.release())

            try:
                import xmltodict
                self.data = xmltodict.parse(text)['weatherdata']
                model = self.data['meta']['model']
                # The meta model may be a dict or a list of dicts; fall
                # back to the first entry when '@nextrun' is absent.
                if '@nextrun' not in model:
                    model = model[0]
                self._nextrun = dt_util.parse_datetime(model['@nextrun'])
            except (ExpatError, IndexError) as err:
                try_again(err)
                return

        now = dt_util.utcnow()

        tasks = []
        # Update all devices
        for dev in self.devices:
            # Find sensor
            # NOTE(review): new_state is only bound inside the loop below;
            # if the forecast contains no time entries at all, the
            # comparison further down raises NameError on the first
            # device -- verify upstream data always has entries.
            for time_entry in self.data['product']['time']:
                valid_from = dt_util.parse_datetime(time_entry['@from'])
                valid_to = dt_util.parse_datetime(time_entry['@to'])
                new_state = None

                loc_data = time_entry['location']

                # Skip entries that lack this device's data type or have
                # already expired.
                if dev.type not in loc_data or now >= valid_to:
                    continue

                if dev.type == 'precipitation' and valid_from < now:
                    new_state = loc_data[dev.type]['@value']
                    break
                elif dev.type == 'symbol' and valid_from < now:
                    new_state = loc_data[dev.type]['@number']
                    break
                elif dev.type in ('temperature', 'pressure', 'humidity',
                                  'dewpointTemperature'):
                    new_state = loc_data[dev.type]['@value']
                    break
                elif dev.type in ('windSpeed', 'windGust'):
                    new_state = loc_data[dev.type]['@mps']
                    break
                elif dev.type == 'windDirection':
                    new_state = float(loc_data[dev.type]['@deg'])
                    break
                elif dev.type in ('fog', 'cloudiness', 'lowClouds',
                                  'mediumClouds', 'highClouds'):
                    new_state = loc_data[dev.type]['@percent']
                    break

            # Only schedule a state write when the value actually changed.
            # pylint: disable=protected-access
            if new_state != dev._state:
                dev._state = new_state
                tasks.append(dev.async_update_ha_state())

        if tasks:
            yield from asyncio.wait(tasks, loop=self.hass.loop)
示例#46
0
    def forecast(self):
        """Return the forecast, or None when no raw forecasts are cached."""
        # Check if forecasts are available
        raw_forecasts = self.coordinator.data.get(FORECASTS,
                                                  {}).get(self.forecast_type)
        if not raw_forecasts:
            return None

        forecasts = []
        # Cap the number of entries returned per forecast type.
        max_forecasts = MAX_FORECASTS[self.forecast_type]
        forecast_count = 0

        # Set default values (in cases where keys don't exist), None will be
        # returned. Override properties per forecast type as needed
        for forecast in raw_forecasts:
            forecast_dt = dt_util.parse_datetime(forecast[CC_ATTR_TIMESTAMP])

            # Skip malformed timestamps: parse_datetime returns None and
            # the .date() call below would raise AttributeError.
            if forecast_dt is None:
                continue

            # Throw out past data
            if forecast_dt.date() < dt_util.utcnow().date():
                continue

            values = forecast["values"]
            use_datetime = True

            condition = values.get(CC_ATTR_CONDITION)
            precipitation = values.get(CC_ATTR_PRECIPITATION)
            precipitation_probability = values.get(
                CC_ATTR_PRECIPITATION_PROBABILITY)

            temp = values.get(CC_ATTR_TEMPERATURE_HIGH)
            temp_low = values.get(CC_ATTR_TEMPERATURE_LOW)
            wind_direction = values.get(CC_ATTR_WIND_DIRECTION)
            wind_speed = values.get(CC_ATTR_WIND_SPEED)

            if self.forecast_type == DAILY:
                use_datetime = False
                # Daily precipitation arrives as a per-hour rate; scale to
                # a full day.
                if precipitation:
                    precipitation = precipitation * 24
            elif self.forecast_type == NOWCAST:
                # Precipitation is forecasted in CONF_TIMESTEP increments but in a
                # per hour rate, so value needs to be converted to an amount.
                if precipitation:
                    precipitation = (precipitation / 60 *
                                     self._config_entry.options[CONF_TIMESTEP])

            forecasts.append(
                self._forecast_dict(
                    forecast_dt,
                    use_datetime,
                    condition,
                    precipitation,
                    precipitation_probability,
                    temp,
                    temp_low,
                    wind_direction,
                    wind_speed,
                ))

            forecast_count += 1
            if forecast_count == max_forecasts:
                break

        return forecasts
示例#47
0
    def forecast(self):
        """Return the forecast, or None when no raw forecasts are cached."""
        # Check if forecasts are available
        raw_forecasts = self.coordinator.data.get(FORECASTS,
                                                  {}).get(self.forecast_type)
        if not raw_forecasts:
            return None

        forecasts = []

        # Set default values (in cases where keys don't exist), None will be
        # returned. Override properties per forecast type as needed
        for forecast in raw_forecasts:
            forecast_dt = dt_util.parse_datetime(
                self._get_cc_value(forecast, CC_V3_ATTR_TIMESTAMP))

            # Skip entries whose timestamp is missing or malformed;
            # parse_datetime returns None and the datetime handling below
            # assumes a real datetime.
            if forecast_dt is None:
                continue

            use_datetime = True
            condition = self._get_cc_value(forecast, CC_V3_ATTR_CONDITION)
            precipitation = self._get_cc_value(forecast,
                                               CC_V3_ATTR_PRECIPITATION)
            precipitation_probability = self._get_cc_value(
                forecast, CC_V3_ATTR_PRECIPITATION_PROBABILITY)
            temp = self._get_cc_value(forecast, CC_V3_ATTR_TEMPERATURE)
            temp_low = None
            wind_direction = self._get_cc_value(forecast,
                                                CC_V3_ATTR_WIND_DIRECTION)
            wind_speed = self._get_cc_value(forecast, CC_V3_ATTR_WIND_SPEED)

            if self.forecast_type == DAILY:
                use_datetime = False
                forecast_dt = dt_util.start_of_local_day(forecast_dt)
                precipitation = self._get_cc_value(
                    forecast, CC_V3_ATTR_PRECIPITATION_DAILY)
                # Daily temps arrive as a list of min/max observations;
                # pick the first entry that carries each bound.
                temp = next(
                    (self._get_cc_value(item, CC_V3_ATTR_TEMPERATURE_HIGH)
                     for item in forecast[CC_V3_ATTR_TEMPERATURE]
                     if "max" in item),
                    temp,
                )
                temp_low = next(
                    (self._get_cc_value(item, CC_V3_ATTR_TEMPERATURE_LOW)
                     for item in forecast[CC_V3_ATTR_TEMPERATURE]
                     if "min" in item),
                    temp_low,
                )
            elif self.forecast_type == NOWCAST and precipitation:
                # Precipitation is forecasted in CONF_TIMESTEP increments but in a
                # per hour rate, so value needs to be converted to an amount.
                precipitation = (precipitation / 60 *
                                 self._config_entry.options[CONF_TIMESTEP])

            forecasts.append(
                self._forecast_dict(
                    forecast_dt,
                    use_datetime,
                    condition,
                    precipitation,
                    precipitation_probability,
                    temp,
                    temp_low,
                    wind_direction,
                    wind_speed,
                ))

        return forecasts
示例#48
0
def test_parse_datetime_returns_none_for_incorrect_format():
    """parse_datetime should reject garbage input with None."""
    result = dt_util.parse_datetime("not a datetime string")
    assert result is None
示例#49
0
    def handle_new_task(call):
        """Call when a user creates a new Todoist Task from Home Assistant."""
        project_name = call.data[PROJECT_NAME]
        project_id = project_id_lookup[project_name]

        # Create the task
        item = api.items.add(call.data[CONTENT], project_id=project_id)

        if LABELS in call.data:
            task_labels = call.data[LABELS]
            label_ids = [label_id_lookup[label.lower()] for label in task_labels]
            item.update(labels=label_ids)

        if PRIORITY in call.data:
            item.update(priority=call.data[PRIORITY])

        def _todoist_date(date_str):
            """Parse a service date string into the format Todoist expects.

            Falls back to a date-only parse (midnight) when the string has
            no time component, then renders as a UTC timestamp.
            """
            parsed = dt.parse_datetime(date_str)
            if parsed is None:
                due = dt.parse_date(date_str)
                parsed = datetime(due.year, due.month, due.day)
            # Fixed: the due-date branch previously used "%H:%M%S"
            # (missing the colon before seconds), producing timestamps
            # Todoist rejects; both branches now share this format.
            return datetime.strftime(dt.as_utc(parsed), "%Y-%m-%dT%H:%M:%S")

        _due: dict = {}
        if DUE_DATE_STRING in call.data:
            _due["string"] = call.data[DUE_DATE_STRING]

        if DUE_DATE_LANG in call.data:
            _due["lang"] = call.data[DUE_DATE_LANG]

        if DUE_DATE in call.data:
            _due["date"] = _todoist_date(call.data[DUE_DATE])

        if _due:
            item.update(due=_due)

        _reminder_due: dict = {}
        if REMINDER_DATE_STRING in call.data:
            _reminder_due["string"] = call.data[REMINDER_DATE_STRING]

        if REMINDER_DATE_LANG in call.data:
            _reminder_due["lang"] = call.data[REMINDER_DATE_LANG]

        if REMINDER_DATE in call.data:
            _reminder_due["date"] = _todoist_date(call.data[REMINDER_DATE])

        if _reminder_due:
            api.reminders.add(item["id"], due=_reminder_due)

        # Commit changes
        api.commit()
        _LOGGER.debug("Created Todoist task: %s", call.data[CONTENT])
示例#50
0
async def test_async_setup_entry(hass):
    """Test setup Tibber."""
    await async_init_recorder_component(hass)

    def _get_homes():
        home = AsyncMock()
        home.name = "Name"
        home.home_id = "home_id"
        home.currency = "NOK"
        home.get_historic_data.return_value = _CONSUMPTION_DATA_1
        return [home]

    tibber_connection = AsyncMock()
    tibber_connection.name = "tibber"
    tibber_connection.fetch_consumption_data_active_homes.return_value = None
    tibber_connection.get_homes = _get_homes

    coordinator = TibberDataCoordinator(hass, tibber_connection)
    await coordinator._async_update_data()
    await async_wait_recording_done_without_instance(hass)

    # Validate consumption, then cost: both statistics tables are built
    # from the same source rows, keyed by a different value field.
    checks = (
        ("tibber:energy_consumption_home_id", "consumption"),
        ("tibber:energy_totalcost_home_id", "totalCost"),
    )
    for statistic_id, value_key in checks:
        stats = await hass.async_add_executor_job(
            statistics_during_period,
            hass,
            dt_util.parse_datetime(_CONSUMPTION_DATA_1[0]["from"]),
            None,
            [statistic_id],
            "hour",
            True,
        )

        assert len(stats) == 1
        assert len(stats[statistic_id]) == 3
        running_total = 0
        for idx, stat in enumerate(stats[statistic_id]):
            assert stat["start"] == dt_util.parse_datetime(
                _CONSUMPTION_DATA_1[idx]["from"])
            assert stat["state"] == _CONSUMPTION_DATA_1[idx][value_key]
            assert stat["mean"] is None
            assert stat["min"] is None
            assert stat["max"] is None
            assert stat["last_reset"] is None

            running_total += _CONSUMPTION_DATA_1[idx][value_key]
            assert stat["sum"] == running_total
示例#51
0
    async def get(
        self, request: web.Request, datetime: str | None = None
    ) -> web.Response:
        """Return history over a period of time."""
        query = request.query

        # Resolve the requested start of the window; reject garbage input.
        parsed_dt = None
        if datetime:
            parsed_dt = dt_util.parse_datetime(datetime)
            if parsed_dt is None:
                return self.json_message("Invalid datetime", HTTPStatus.BAD_REQUEST)

        now = dt_util.utcnow()
        one_day = timedelta(days=1)
        start_time = dt_util.as_utc(parsed_dt) if parsed_dt else now - one_day

        # A window entirely in the future can have no history.
        if start_time > now:
            return self.json([])

        if end_time_str := query.get("end_time"):
            end_time = dt_util.parse_datetime(end_time_str)
            if not end_time:
                return self.json_message("Invalid end_time", HTTPStatus.BAD_REQUEST)
            end_time = dt_util.as_utc(end_time)
        else:
            end_time = start_time + one_day

        entity_ids = None
        if entity_ids_str := query.get("filter_entity_id"):
            entity_ids = entity_ids_str.lower().split(",")

        include_start_time_state = "skip_initial_state" not in query
        significant_changes_only = query.get("significant_changes_only", "1") != "0"
        minimal_response = "minimal_response" in query

        hass = request.app["hass"]

        # Short-circuit: without the initial state, entities with no
        # recorded changes after start_time produce nothing.
        if (
            not include_start_time_state
            and entity_ids
            and not _entities_may_have_state_changes_after(hass, entity_ids, start_time)
        ):
            return self.json([])

        response = await hass.async_add_executor_job(
            self._sorted_significant_states_json,
            hass,
            start_time,
            end_time,
            entity_ids,
            include_start_time_state,
            significant_changes_only,
            minimal_response,
        )
        return cast(web.Response, response)
示例#52
0
async def test_aemet_weather_create_sensors(hass):
    """Test creation of weather sensors."""

    now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00")
    with patch("homeassistant.util.dt.now", return_value=now), patch(
            "homeassistant.util.dt.utcnow", return_value=now):
        await async_init_integration(hass)

    # Table of entity_id -> expected state for every created sensor.
    expected_states = {
        "sensor.aemet_condition": ATTR_CONDITION_SNOWY,
        "sensor.aemet_humidity": "99.0",
        "sensor.aemet_pressure": "1004.4",
        "sensor.aemet_rain": "1.8",
        "sensor.aemet_rain_probability": "100",
        "sensor.aemet_snow": "1.8",
        "sensor.aemet_snow_probability": "100",
        "sensor.aemet_station_id": "3195",
        "sensor.aemet_station_name": "MADRID RETIRO",
        "sensor.aemet_station_timestamp": "2021-01-09T12:00:00+00:00",
        "sensor.aemet_storm_probability": "0",
        "sensor.aemet_temperature": "-0.7",
        "sensor.aemet_temperature_feeling": "-4",
        "sensor.aemet_town_id": "id28065",
        "sensor.aemet_town_name": "Getafe",
        "sensor.aemet_town_timestamp": "2021-01-09 11:47:45+00:00",
        "sensor.aemet_wind_bearing": "90.0",
        "sensor.aemet_wind_max_speed": "24",
        "sensor.aemet_wind_speed": "15",
    }
    for entity_id, expected in expected_states.items():
        state = hass.states.get(entity_id)
        assert state.state == expected
示例#53
0
 async def async_added_to_hass(self) -> None:
     """Restore the last-pressed timestamp when the button is added to hass."""
     last_state = await self.async_get_last_state()
     if last_state is None or last_state.state is None:
         return
     self.__last_pressed = dt_util.parse_datetime(last_state.state)
示例#54
0
    async def _build_media_items(self, title, category):
        """Fetch requested gameclip/screenshot media.

        ``title`` is "title_id#<middle>#thumbnail_url" (the middle segment
        is unused here); ``category`` is "<owner>#<kind>" where owner is
        "my" or "community" and kind is "gameclips" or "screenshots".
        Returns a directory BrowseMediaSource, or None for an unknown owner.
        """
        title_id, _, thumbnail = title.split("#", 2)
        owner, kind = category.split("#", 1)

        items: List[XboxMediaItem] = []
        try:
            if kind == "gameclips":
                if owner == "my":
                    response: GameclipsResponse = (
                        await self.client.gameclips.get_recent_clips_by_xuid(
                            self.client.xuid, title_id
                        )
                    )
                elif owner == "community":
                    response: GameclipsResponse = await self.client.gameclips.get_recent_community_clips_by_title_id(
                        title_id
                    )
                else:
                    return None
                items = [
                    XboxMediaItem(
                        # Fall back to a formatted recording time when the
                        # clip has no user caption; date_recorded is a
                        # string and must be parsed first.
                        item.user_caption
                        or dt_util.as_local(
                            dt_util.parse_datetime(item.date_recorded)
                        ).strftime("%b. %d, %Y %I:%M %p"),
                        item.thumbnails[0].uri,
                        item.game_clip_uris[0].uri,
                        MEDIA_CLASS_VIDEO,
                    )
                    for item in response.game_clips
                ]
            elif kind == "screenshots":
                if owner == "my":
                    response: ScreenshotResponse = (
                        await self.client.screenshots.get_recent_screenshots_by_xuid(
                            self.client.xuid, title_id
                        )
                    )
                elif owner == "community":
                    response: ScreenshotResponse = await self.client.screenshots.get_recent_community_screenshots_by_title_id(
                        title_id
                    )
                else:
                    return None
                items = [
                    XboxMediaItem(
                        # NOTE(review): unlike the clips branch, date_taken
                        # is used directly (no parse_datetime) and the
                        # format lacks the space before %p -- confirm both
                        # differences are intentional.
                        item.user_caption
                        or dt_util.as_local(item.date_taken).strftime(
                            "%b. %d, %Y %I:%M%p"
                        ),
                        item.thumbnails[0].uri,
                        item.screenshot_uris[0].uri,
                        MEDIA_CLASS_IMAGE,
                    )
                    for item in response.screenshots
                ]
        except ValidationError:
            # Unexpected API response
            pass

        # Wrap the collected items in an expandable directory node.
        return BrowseMediaSource(
            domain=DOMAIN,
            identifier=f"{title}~~{category}",
            media_class=MEDIA_CLASS_DIRECTORY,
            media_content_type="",
            title=f"{owner.title()} {kind.title()}",
            can_play=False,
            can_expand=True,
            children=[_build_media_item(title, category, item) for item in items],
            children_media_class=MEDIA_CLASS_MAP[kind],
            thumbnail=thumbnail,
        )
示例#55
0
def _get_utc_value(entity: RenaultSensor[T]) -> datetime:
    """Parse this entity's string data and return it as a UTC datetime."""
    parsed = parse_datetime(cast(str, entity.data))
    if TYPE_CHECKING:
        # Narrow Optional[datetime] for the type checker only; this
        # branch never runs at runtime.
        assert parsed is not None
    return as_utc(parsed)
示例#56
0
def _entity_as_time(
    hass: HomeAssistant, entity_id: str, parameter: str
) -> dt_util.dt.time | None:
    """Resolve an entity id to a local time for the time condition.

    Raises ConditionErrorMessage when the entity does not exist. Returns
    None when the entity cannot provide a usable time (wrong domain/class,
    unavailable state, or an unparseable timestamp).
    """
    entity = hass.states.get(entity_id)
    if not entity:
        raise ConditionErrorMessage("time",
                                    f"unknown '{parameter}' entity {entity_id}")
    if entity.domain == "input_datetime":
        return dt_util.dt.time(
            entity.attributes.get("hour", 23),
            entity.attributes.get("minute", 59),
            entity.attributes.get("second", 59),
        )
    if entity.attributes.get(
            ATTR_DEVICE_CLASS
    ) == DEVICE_CLASS_TIMESTAMP and entity.state not in (
            STATE_UNAVAILABLE,
            STATE_UNKNOWN,
    ):
        parsed = dt_util.parse_datetime(entity.state)
        if parsed is None:
            return None
        return dt_util.as_local(parsed).time()
    return None


def time(
    hass: HomeAssistant,
    before: dt_util.dt.time | str | None = None,
    after: dt_util.dt.time | str | None = None,
    weekday: None | str | Container[str] = None,
) -> bool:
    """Test if local time condition matches.

    Handle the fact that time is continuous and we may be testing for
    a period that crosses midnight. In that case it is easier to test
    for the opposite. "(23:59 <= now < 00:01)" would be the same as
    "not (00:01 <= now < 23:59)".
    """
    now = dt_util.now()
    now_time = now.time()

    # Resolve string arguments via entities; the previously duplicated
    # after/before resolution now shares _entity_as_time.
    if after is None:
        after = dt_util.dt.time(0)
    elif isinstance(after, str):
        after = _entity_as_time(hass, after, "after")
        if after is None:
            return False

    if before is None:
        before = dt_util.dt.time(23, 59, 59, 999999)
    elif isinstance(before, str):
        before = _entity_as_time(hass, before, "before")
        if before is None:
            return False

    condition_trace_update_result(after=after,
                                  now_time=now_time,
                                  before=before)
    if after < before:
        # Normal window within a single day.
        if not after <= now_time < before:
            return False
    else:
        # Window crosses midnight: test the complement.
        if before <= now_time < after:
            return False

    if weekday is not None:
        now_weekday = WEEKDAYS[now.weekday()]

        condition_trace_update_result(weekday=weekday, now_weekday=now_weekday)
        if (isinstance(weekday, str) and weekday != now_weekday
                or now_weekday not in weekday):
            return False

    return True
示例#57
0
    def calculate_timestamp(self,
                            time_str,
                            now: datetime.datetime = None,
                            iteration: int = 0) -> datetime.datetime:
        """Calculate the next occurrence of a time string.

        ``time_str`` is either a fixed time or (per ``has_sun``) a
        sunrise/sunset expression with a signed "HH:MM" offset. Returns
        the next matching timestamp that satisfies the weekday and
        start/end date restrictions, or None when it cannot be resolved.
        Recurses day by day, giving up after 7 iterations.
        """
        if time_str is None:
            return None
        if now is None:
            now = dt_util.as_local(dt_util.utcnow())

        res = has_sun(time_str)
        if not res:
            # fixed time
            time = dt_util.parse_time(time_str)
            ts = dt_util.find_next_time_expression_time(
                now, [time.second], [time.minute], [time.hour])
        else:
            # relative to sunrise/sunset
            sun = self.hass.states.get(const.SUN_ENTITY)
            if not sun:
                return None
            ts = None
            if (res.group(1) == const.SUN_EVENT_SUNRISE
                    and ATTR_NEXT_RISING in sun.attributes):
                ts = dt_util.parse_datetime(sun.attributes[ATTR_NEXT_RISING])
            elif (res.group(1) == const.SUN_EVENT_SUNSET
                  and ATTR_NEXT_SETTING in sun.attributes):
                ts = dt_util.parse_datetime(sun.attributes[ATTR_NEXT_SETTING])
            if not ts:
                return None
            ts = dt_util.as_local(ts)
            # Work in whole minutes.
            ts = ts.replace(second=0)
            time_sun = datetime.timedelta(hours=ts.hour,
                                          minutes=ts.minute,
                                          seconds=ts.second)
            # group(2) is the offset sign, group(3) the "HH:MM" offset.
            offset = dt_util.parse_time(res.group(3))
            offset = datetime.timedelta(hours=offset.hour,
                                        minutes=offset.minute,
                                        seconds=offset.second)
            if res.group(2) == "-":
                if (time_sun - offset).total_seconds() >= 0:
                    ts = ts - offset
                else:
                    # prevent offset to shift the time past the extends of the day
                    ts = ts.replace(hour=0, minute=0, second=0)
            else:
                # 86340 s == 23:59:00, the last whole minute of the day.
                if (time_sun + offset).total_seconds() <= 86340:
                    ts = ts + offset
                else:
                    # prevent offset to shift the time past the extends of the day
                    ts = ts.replace(hour=23, minute=59, second=0)
            ts = dt_util.find_next_time_expression_time(
                now, [ts.second], [ts.minute], [ts.hour])

        # Default advance when this candidate is rejected below.
        time_delta = datetime.timedelta(seconds=1)

        if self.day_in_weekdays(ts) and ((ts - now).total_seconds() > 0
                                         or iteration > 0):

            if self._start_date and days_until_date(self._start_date, ts) > 0:
                # start date is more than a week in the future, jump to start date
                time_delta = datetime.timedelta(
                    days=days_until_date(self._start_date, ts))

            elif self._end_date and days_until_date(self._end_date, ts) < 0:
                # end date is in the past, jump to end date
                time_delta = datetime.timedelta(
                    days=days_until_date(self._end_date, ts))

            else:
                # date restrictions are met
                return ts

        # calculate next timestamp
        next_day = dt_util.find_next_time_expression_time(
            now + time_delta, [0], [0], [0])
        if iteration > 7:
            _LOGGER.warning(
                "failed to calculate next timeslot for schedule {}".format(
                    self.id))
            return None
        return self.calculate_timestamp(time_str, next_day, iteration + 1)
示例#58
0
    async def updating_devices(self, *_):
        """Find the current data from self.data and push it to all devices.

        For each device, picks the cached time entry closest to the
        desired forecast time that actually contains that device's data
        type.
        """
        if not self.data:
            return

        now = dt_util.utcnow()
        # Target time: now plus the configured forecast horizon in hours.
        forecast_time = now + dt_util.dt.timedelta(hours=self._forecast)

        # Find the correct time entry. Since not all time entries contain all
        # types of data, we cannot just select one. Instead, we order  them by
        # distance from the desired forecast_time, and for every device iterate
        # them in order of increasing distance, taking the first time_point
        # that contains the desired data.

        ordered_entries = []

        for time_entry in self.data["product"]["time"]:
            valid_from = dt_util.parse_datetime(time_entry["@from"])
            valid_to = dt_util.parse_datetime(time_entry["@to"])

            if now >= valid_to:
                # Has already passed. Never select this.
                continue

            # Sum of both endpoint distances: favors entries that are both
            # narrow and centered near forecast_time.
            average_dist = abs(
                (valid_to - forecast_time).total_seconds()) + abs(
                    (valid_from - forecast_time).total_seconds())

            ordered_entries.append((average_dist, time_entry))

        ordered_entries.sort(key=lambda item: item[0])

        # Update all devices
        if ordered_entries:
            for dev in self.devices:
                new_state = None

                # Take the closest entry that carries this device's type.
                for (_, selected_time_entry) in ordered_entries:
                    loc_data = selected_time_entry["location"]

                    if dev.type not in loc_data:
                        continue

                    if dev.type == "precipitation":
                        new_state = loc_data[dev.type]["@value"]
                    elif dev.type == "symbol":
                        new_state = loc_data[dev.type]["@number"]
                    elif dev.type in (
                            "temperature",
                            "pressure",
                            "humidity",
                            "dewpointTemperature",
                    ):
                        new_state = loc_data[dev.type]["@value"]
                    elif dev.type in ("windSpeed", "windGust"):
                        new_state = loc_data[dev.type]["@mps"]
                    elif dev.type == "windDirection":
                        new_state = float(loc_data[dev.type]["@deg"])
                    elif dev.type in (
                            "fog",
                            "cloudiness",
                            "lowClouds",
                            "mediumClouds",
                            "highClouds",
                    ):
                        new_state = loc_data[dev.type]["@percent"]

                    break

                # Only write state when the value actually changed.
                # pylint: disable=protected-access
                if new_state != dev._state:
                    dev._state = new_state
                    if dev.hass:
                        dev.async_write_ha_state()
示例#59
0
    def setpoints(self) -> dict[str, Any]:
        """Return the current/next setpoints from the schedule.

        Only Zones & DHW controllers (but not the TCS) can have schedules.

        Populates (and returns) self._setpoints with "this_sp_from" /
        "next_sp_from" ISO-format datetimes, plus either "*_sp_temp"
        (when the switchpoint has a "heatSetpoint") or "*_sp_state"
        (when it has a "DhwState") for each of the two entries.
        """
        def _dt_evo_to_aware(dt_naive: dt, utc_offset: timedelta) -> dt:
            # Switchpoint times are naive; tag them as UTC, subtract the
            # TCS's UTC offset, then present the result in HA local time.
            dt_aware = dt_naive.replace(tzinfo=dt_util.UTC) - utc_offset
            return dt_util.as_local(dt_aware)

        if not self._schedule or not self._schedule.get("DailySchedules"):
            return {}  # no scheduled setpoints when {'DailySchedules': []}

        day_time = dt_util.now()
        day_of_week = day_time.weekday()  # for evohome, 0 is Monday
        time_of_day = day_time.strftime("%H:%M:%S")

        try:
            # Iterate today's switchpoints until past the current time of day...
            day = self._schedule["DailySchedules"][day_of_week]
            sp_idx = -1  # last switchpoint of the day before
            for i, tmp in enumerate(day["Switchpoints"]):
                if time_of_day > tmp["TimeOfDay"]:
                    sp_idx = i  # current setpoint
                else:
                    break

            # Did the current SP start yesterday? Does the next start SP tomorrow?
            # sp_idx == -1 means no switchpoint has passed yet today, so the
            # current one is yesterday's last (day offset -1; idx -1 then
            # selects the final switchpoint via negative list indexing).
            # When sp_idx is today's last entry, the next one is tomorrow's
            # first (day offset +1).
            this_sp_day = -1 if sp_idx == -1 else 0
            next_sp_day = 1 if sp_idx + 1 == len(day["Switchpoints"]) else 0

            # (sp_idx + 1) * (1 - next_sp_day) is 0 when wrapping to
            # tomorrow's first switchpoint, otherwise sp_idx + 1.
            for key, offset, idx in (
                ("this", this_sp_day, sp_idx),
                ("next", next_sp_day, (sp_idx + 1) * (1 - next_sp_day)),
            ):
                sp_date = (day_time +
                           timedelta(days=offset)).strftime("%Y-%m-%d")
                day = self._schedule["DailySchedules"][(day_of_week + offset) %
                                                       7]
                switchpoint = day["Switchpoints"][idx]

                dt_aware = _dt_evo_to_aware(
                    dt_util.parse_datetime(
                        f"{sp_date}T{switchpoint['TimeOfDay']}"),
                    self._evo_broker.tcs_utc_offset,
                )

                self._setpoints[f"{key}_sp_from"] = dt_aware.isoformat()
                try:
                    # Zone switchpoints carry a temperature setpoint...
                    self._setpoints[f"{key}_sp_temp"] = switchpoint[
                        "heatSetpoint"]
                except KeyError:
                    # ...DHW switchpoints carry an on/off state instead.
                    self._setpoints[f"{key}_sp_state"] = switchpoint[
                        "DhwState"]

        except IndexError:
            # Malformed/unexpected schedule shape: log (with traceback) and
            # report an empty result rather than crashing the update.
            self._setpoints = {}
            _LOGGER.warning(
                "Failed to get setpoints, report as an issue if this error persists",
                exc_info=True,
            )

        return self._setpoints
示例#60
0
    async def get(self, request, datetime=None):
        """Retrieve logbook entries.

        Optional query parameters:
          period   - number of days to fetch (default 1)
          entity   - entity id(s) to filter on
          end_time - explicit window end (overrides period)
          entity_matches_only, context_id - additional filters

        Returns a JSON response, or a 400 for malformed inputs.
        """
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message("Invalid datetime",
                                         HTTPStatus.BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        period = request.query.get("period")
        if period is None:
            period = 1
        else:
            try:
                period = int(period)
            except ValueError:
                # Query input is untrusted: a non-numeric period previously
                # raised an unhandled ValueError (HTTP 500); reject with 400.
                return self.json_message("Invalid period",
                                         HTTPStatus.BAD_REQUEST)

        entity_ids = request.query.get("entity")
        if entity_ids:
            try:
                entity_ids = cv.entity_ids(entity_ids)
            except vol.Invalid as err:
                # Chain from the caught exception instance (not the class)
                # so the original validation failure is preserved.
                raise InvalidEntityFormatError(
                    f"Invalid entity id(s) encountered: {entity_ids}. "
                    "Format should be <domain>.<object_id>") from err

        end_time = request.query.get("end_time")
        if end_time is None:
            # Window derived from `period`: starts (period - 1) days before
            # the requested day and spans `period` days.
            start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1)
            end_day = start_day + timedelta(days=period)
        else:
            start_day = datetime
            end_day = dt_util.parse_datetime(end_time)
            if end_day is None:
                return self.json_message("Invalid end_time",
                                         HTTPStatus.BAD_REQUEST)

        hass = request.app["hass"]

        entity_matches_only = "entity_matches_only" in request.query
        context_id = request.query.get("context_id")

        if entity_ids and context_id:
            # The two filters are mutually exclusive.
            return self.json_message("Can't combine entity with context_id",
                                     HTTPStatus.BAD_REQUEST)

        def json_events():
            """Fetch events and generate JSON (runs in the executor)."""
            return self.json(
                _get_events(
                    hass,
                    start_day,
                    end_day,
                    entity_ids,
                    self.filters,
                    self.entities_filter,
                    entity_matches_only,
                    context_id,
                ))

        # _get_events does blocking database work; keep it off the event loop.
        return await hass.async_add_executor_job(json_events)