def update(self):
    """ Gets the latest data from opendata.ch. """
    url = (_RESOURCE + "connections?" +
           "from=" + self.start + "&" +
           "to=" + self.destination + "&" +
           "fields[]=connections/from/departureTimestamp/&" +
           "fields[]=connections/")
    response = requests.get(url, timeout=30)
    connections = response.json()["connections"][:2]

    try:
        # UTC departure instants of the next two connections.
        departures = [
            dt_util.utc_from_timestamp(item["from"]["departureTimestamp"])
            for item in connections
        ]
        # Human-readable local departure times.
        self.times = [
            dt_util.datetime_to_time_str(dt_util.as_local(departure))
            for departure in departures
        ]
        # Remaining time until the first departure.
        self.times.append(
            dt_util.as_local(departures[0]) -
            dt_util.as_local(dt_util.utcnow()))
    except KeyError:
        self.times = ["n/a"]
def update(self):
    """Get the latest data from opendata.ch."""
    query = ('connections?' +
             'from=' + self.start + '&' +
             'to=' + self.destination + '&' +
             'fields[]=connections/from/departureTimestamp/&' +
             'fields[]=connections/')
    response = requests.get(_RESOURCE + query, timeout=10)
    connections = response.json()['connections'][:2]

    try:
        # Local wall-clock departure times of the next two connections.
        self.times = [
            dt_util.as_local(dt_util.utc_from_timestamp(
                item['from']['departureTimestamp'])).strftime(
                    TIME_STR_FORMAT)
            for item in connections
        ]
        # Time left until the first connection departs.
        first_departure = dt_util.utc_from_timestamp(
            connections[0]['from']['departureTimestamp'])
        self.times.append(
            dt_util.as_local(first_departure) -
            dt_util.as_local(dt_util.utcnow()))
    except KeyError:
        self.times = ['n/a']
def update(self):
    """Get the latest system information.

    Dispatches on self.type and stores the result in self._state.
    self.argument carries the per-type parameter (mount point,
    process name, or network interface name).
    """
    import psutil
    if self.type == 'disk_use_percent':
        self._state = psutil.disk_usage(self.argument).percent
    elif self.type == 'disk_use':
        # Bytes -> GiB, one decimal place.
        self._state = round(psutil.disk_usage(self.argument).used /
                            1024**3, 1)
    elif self.type == 'disk_free':
        self._state = round(psutil.disk_usage(self.argument).free /
                            1024**3, 1)
    elif self.type == 'memory_use_percent':
        self._state = psutil.virtual_memory().percent
    elif self.type == 'memory_use':
        # "Used" is total minus available, reported in MiB.
        self._state = round((psutil.virtual_memory().total -
                             psutil.virtual_memory().available) /
                            1024**2, 1)
    elif self.type == 'memory_free':
        self._state = round(psutil.virtual_memory().available / 1024**2, 1)
    elif self.type == 'swap_use_percent':
        self._state = psutil.swap_memory().percent
    elif self.type == 'swap_use':
        self._state = round(psutil.swap_memory().used / 1024**3, 1)
    elif self.type == 'swap_free':
        self._state = round(psutil.swap_memory().free / 1024**3, 1)
    elif self.type == 'processor_use':
        # interval=None is non-blocking: CPU percent since the last call.
        self._state = round(psutil.cpu_percent(interval=None))
    elif self.type == 'process':
        # ON when any running process name contains the argument substring.
        if any(self.argument in l.name() for l in psutil.process_iter()):
            self._state = STATE_ON
        else:
            self._state = STATE_OFF
    elif self.type == 'network_out' or self.type == 'network_in':
        counters = psutil.net_io_counters(pernic=True)
        if self.argument in counters:
            # IO_COUNTER maps the sensor type to the tuple index; MiB.
            counter = counters[self.argument][IO_COUNTER[self.type]]
            self._state = round(counter / 1024**2, 1)
        else:
            self._state = STATE_UNKNOWN
    elif self.type == 'packets_out' or self.type == 'packets_in':
        counters = psutil.net_io_counters(pernic=True)
        if self.argument in counters:
            self._state = counters[self.argument][IO_COUNTER[self.type]]
        else:
            self._state = STATE_UNKNOWN
    elif self.type == 'ipv4_address' or self.type == 'ipv6_address':
        addresses = psutil.net_if_addrs()
        if self.argument in addresses:
            # IF_ADDRS selects the address-family entry for the interface.
            self._state = addresses[self.argument][IF_ADDRS[self.type]][1]
        else:
            self._state = STATE_UNKNOWN
    elif self.type == 'last_boot':
        # Boot instant rendered as a local ISO date (no time component).
        self._state = dt_util.as_local(
            dt_util.utc_from_timestamp(psutil.boot_time())
        ).date().isoformat()
    elif self.type == 'since_last_boot':
        # Kept as a timedelta object.
        self._state = dt_util.utcnow() - dt_util.utc_from_timestamp(
            psutil.boot_time())
def row_to_state(row):
    """ Convert a database row to a state. """
    try:
        entity_id = row[1]
        state = row[2]
        attributes = json.loads(row[3])
        return State(entity_id, state, attributes,
                     dt_util.utc_from_timestamp(row[4]),
                     dt_util.utc_from_timestamp(row[5]))
    except ValueError:
        # When json.loads fails
        _LOGGER.exception("Error converting row to state: %s", row)
        return None
def update_period(self):
    """Parse the templates and store a datetime tuple in _period.

    Each template may render either an ISO datetime string or a numeric
    UNIX timestamp. A missing endpoint is derived from the configured
    duration, and the period is clamped so it never reaches into the
    future.
    """
    start = None
    end = None

    # Parse start
    if self._start is not None:
        try:
            start_rendered = self._start.render()
        except (TemplateError, TypeError) as ex:
            HistoryStatsHelper.handle_template_exception(ex, 'start')
            return
        start = dt_util.parse_datetime(start_rendered)
        if start is None:
            try:
                # Not an ISO datetime: interpret the rendered value as a
                # UNIX timestamp, truncated to whole seconds.
                start = dt_util.as_local(dt_util.utc_from_timestamp(
                    math.floor(float(start_rendered))))
            except ValueError:
                # BUGFIX: the adjacent string literals were missing a
                # separating space and logged "...a datetimeor a timestamp".
                _LOGGER.error("Parsing error: start must be a datetime "
                              "or a timestamp")
                return

    # Parse end
    if self._end is not None:
        try:
            end_rendered = self._end.render()
        except (TemplateError, TypeError) as ex:
            HistoryStatsHelper.handle_template_exception(ex, 'end')
            return
        end = dt_util.parse_datetime(end_rendered)
        if end is None:
            try:
                end = dt_util.as_local(dt_util.utc_from_timestamp(
                    math.floor(float(end_rendered))))
            except ValueError:
                _LOGGER.error("Parsing error: end must be a datetime "
                              "or a timestamp")
                return

    # Calculate start or end using the duration
    if start is None:
        start = end - self._duration
    if end is None:
        end = start + self._duration

    if start > dt_util.now():
        # History hasn't been written yet for this period
        return

    if dt_util.now() < end:
        # No point in making stats of the future
        end = dt_util.now()

    self._period = start, end
def update(self):
    """ Get the latest system informations.

    Dispatches on self.type and stores the result in self._state.
    """
    import psutil
    if self.type == 'disk_use_percent':
        self._state = psutil.disk_usage(self.argument).percent
    elif self.type == 'disk_use':
        # Bytes -> GiB, one decimal place.
        self._state = round(psutil.disk_usage(self.argument).used /
                            1024**3, 1)
    elif self.type == 'disk_free':
        self._state = round(psutil.disk_usage(self.argument).free /
                            1024**3, 1)
    elif self.type == 'memory_use_percent':
        self._state = psutil.virtual_memory().percent
    elif self.type == 'memory_use':
        # "Used" is total minus available, reported in MiB.
        self._state = round((psutil.virtual_memory().total -
                             psutil.virtual_memory().available) /
                            1024**2, 1)
    elif self.type == 'memory_free':
        self._state = round(psutil.virtual_memory().available / 1024**2, 1)
    elif self.type == 'swap_use_percent':
        self._state = psutil.swap_memory().percent
    elif self.type == 'swap_use':
        self._state = round(psutil.swap_memory().used / 1024**3, 1)
    elif self.type == 'swap_free':
        self._state = round(psutil.swap_memory().free / 1024**3, 1)
    elif self.type == 'processor_use':
        # interval=None is non-blocking: CPU percent since the last call.
        self._state = round(psutil.cpu_percent(interval=None))
    elif self.type == 'process':
        # ON when any running process name contains the argument substring.
        if any(self.argument in l.name() for l in psutil.process_iter()):
            self._state = STATE_ON
        else:
            self._state = STATE_OFF
    elif self.type == 'network_out':
        # net_io_counters tuple index 0 is bytes sent; reported in MiB.
        self._state = round(psutil.net_io_counters(pernic=True)
                            [self.argument][0] / 1024**2, 1)
    elif self.type == 'network_in':
        # Index 1 is bytes received.
        self._state = round(psutil.net_io_counters(pernic=True)
                            [self.argument][1] / 1024**2, 1)
    elif self.type == 'packets_out':
        self._state = psutil.net_io_counters(pernic=True)[self.argument][2]
    elif self.type == 'packets_in':
        self._state = psutil.net_io_counters(pernic=True)[self.argument][3]
    elif self.type == 'ipv4_address':
        # NOTE(review): assumes entry 0 of net_if_addrs() for this
        # interface is the IPv4 address and entry 1 the IPv6 address —
        # that ordering is not guaranteed by psutil; confirm.
        self._state = psutil.net_if_addrs()[self.argument][0][1]
    elif self.type == 'ipv6_address':
        self._state = psutil.net_if_addrs()[self.argument][1][1]
    elif self.type == 'last_boot':
        # Boot instant rendered as a local date string.
        self._state = dt_util.datetime_to_date_str(
            dt_util.as_local(
                dt_util.utc_from_timestamp(psutil.boot_time())))
    elif self.type == 'since_last_boot':
        # Kept as a timedelta object.
        self._state = dt_util.utcnow() - dt_util.utc_from_timestamp(
            psutil.boot_time())
def __init__(self, row=None):
    """Initialize a run, fresh or restored from a database row."""
    self.end = None
    if row is None:
        # Fresh run: begins at the recorder's start time and is open.
        self.start = _INSTANCE.recording_start
        self.closed_incorrect = False
        return

    # Restore from a database row.
    self.start = date_util.utc_from_timestamp(row[1])
    if row[2] is not None:
        self.end = date_util.utc_from_timestamp(row[2])
    self.closed_incorrect = bool(row[3])
def test_intervals(self):
    """Test timing intervals of sensors."""
    # (sensor type, current timestamp, expected next-interval timestamp)
    cases = [
        ('time', 45, 60),
        ('date', 12345, 86400),
        ('beat', 29, 86.4),
        ('date_time', 1495068899, 1495068900),
    ]
    for sensor_type, now_ts, expected_ts in cases:
        device = time_date.TimeDateSensor(self.hass, sensor_type)
        now = dt_util.utc_from_timestamp(now_ts)
        assert device.get_next_interval(now) == \
            dt_util.utc_from_timestamp(expected_ts)

    # Without an explicit "now" the next interval must lie in the future.
    now = dt_util.utcnow()
    device = time_date.TimeDateSensor(self.hass, 'time_date')
    next_time = device.get_next_interval()
    assert next_time > now
def device_state_attributes(self):
    """Return the state attributes of the DWD-Weather-Warnings.

    Builds an attribute dict from the API payload; per-warning keys are
    numbered starting at 1 (warning_1_name, warning_1_level, ...).
    Raises ValueError for an unrecognized self._var_id.
    """
    data = {
        ATTR_ATTRIBUTION: ATTRIBUTION,
        'region_name': self._api.region_name
    }

    if self._api.region_id is not None:
        data['region_id'] = self._api.region_id

    if self._api.region_state is not None:
        data['region_state'] = self._api.region_state

    if self._api.data['time'] is not None:
        # The API delivers milliseconds since the epoch.
        data['last_update'] = dt_util.as_local(
            dt_util.utc_from_timestamp(self._api.data['time'] / 1000))

    if self._var_id == 'current_warning_level':
        prefix = 'current'
    elif self._var_id == 'advance_warning_level':
        prefix = 'advance'
    else:
        # Raise a specific exception type instead of the bare Exception
        # (still caught by any caller handling Exception).
        raise ValueError('Unknown warning type')

    data['warning_count'] = self._api.data[prefix + '_warning_count']
    # enumerate replaces the manual "i = i + 1" counter.
    for i, event in enumerate(self._api.data[prefix + '_warnings'], 1):
        data['warning_{}_name'.format(i)] = event['event']
        data['warning_{}_level'.format(i)] = event['level']
        data['warning_{}_type'.format(i)] = event['type']
        if event['headline']:
            data['warning_{}_headline'.format(i)] = event['headline']
        if event['description']:
            data['warning_{}_description'.format(i)] = event['description']
        if event['instruction']:
            data['warning_{}_instruction'.format(i)] = event['instruction']
        if event['start'] is not None:
            data['warning_{}_start'.format(i)] = dt_util.as_local(
                dt_util.utc_from_timestamp(event['start'] / 1000))
        if event['end'] is not None:
            data['warning_{}_end'.format(i)] = dt_util.as_local(
                dt_util.utc_from_timestamp(event['end'] / 1000))

    return data
def timestamp_utc(value):
    """Filter to convert given timestamp to UTC date/time."""
    try:
        converted = dt_util.utc_from_timestamp(value)
        return converted.strftime(DATE_STR_FORMAT)
    except (ValueError, TypeError):
        # If timestamp can't be converted
        return value
def media_start_time(self):
    """Start time the program aired."""
    if self._is_standby:
        return None
    start_utc = dt_util.utc_from_timestamp(self._current['startTime'])
    return dt_util.as_local(start_utc)
def device_state_attributes(self):
    """Return the state attributes of the device."""
    attr = {}

    if self.vera_device.has_battery:
        attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level + '%'

    if self.vera_device.is_armable:
        attr[ATTR_ARMED] = (
            'True' if self.vera_device.is_armed else 'False')

    if self.vera_device.is_trippable:
        last_tripped = self.vera_device.last_trip
        if last_tripped is None:
            attr[ATTR_LAST_TRIP_TIME] = None
        else:
            # last_trip is an epoch timestamp in seconds.
            utc_time = dt_util.utc_from_timestamp(int(last_tripped))
            attr[ATTR_LAST_TRIP_TIME] = dt_util.datetime_to_str(utc_time)
        attr[ATTR_TRIPPED] = (
            'True' if self.vera_device.is_tripped else 'False')

    attr['Vera Device Id'] = self.vera_device.vera_device_id
    return attr
def device_state_attributes(self):
    """Return the state attributes of the device."""
    attr = {}

    if self.vera_device.has_battery:
        attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level + '%'

    if self.vera_device.is_armable:
        attr[ATTR_ARMED] = (
            'True' if self.vera_device.is_armed else 'False')

    if self.vera_device.is_trippable:
        last_tripped = self.vera_device.last_trip
        if last_tripped is None:
            attr[ATTR_LAST_TRIP_TIME] = None
        else:
            # last_trip is an epoch timestamp in seconds.
            attr[ATTR_LAST_TRIP_TIME] = utc_from_timestamp(
                int(last_tripped)).isoformat()
        attr[ATTR_TRIPPED] = (
            'True' if self.vera_device.is_tripped else 'False')

    power = self.vera_device.power
    if power:
        # Device reports watts; attribute is milliwatt-hours scaled value.
        attr[ATTR_CURRENT_POWER_MWH] = convert(power, float, 0.0) * 1000

    attr['Vera Device Id'] = self.vera_device.vera_device_id
    return attr
def device_state_attributes(self):
    """Return the state attributes of the sensor."""
    italy = self._data['italy']
    abroad = self._data['estero']
    return {
        ATTR_ATTRIBUTION: ATTRIBUTION,
        'next_renewal': dt_util.utc_from_timestamp(
            self._data['info']['rinnovo']).isoformat(),
        'italy_sent_sms': italy['sms'],
        'italy_over_plan_sms': italy['sms_extra'],
        'italy_sent_mms': italy['mms'],
        'italy_over_plan_mms': italy['mms_extra'],
        'italy_calls_seconds': italy['chiamate'],
        'italy_over_plan_calls': italy['chiamate_extra'],
        'italy_data': italy['internet'],
        'italy_data_max': italy['internet_max'],
        'italy_data_over_plan': italy['internet_over'],
        'abroad_sent_sms': abroad['sms'],
        'abroad_over_plan_sms': abroad['sms_extra'],
        'abroad_sent_mms': abroad['mms'],
        'abroad_over_plan_mms': abroad['mms_extra'],
        'abroad_calls_seconds': abroad['chiamate'],
        'abroad_over_plan_calls': abroad['chiamate_extra'],
        'abroad_data': abroad['internet'],
        'abroad_data_max': abroad['internet_max'],
        'abroad_data_over_plan': abroad['internet_over'],
    }
def __init__(self, hass, config):
    """Initialize a proxy camera component."""
    super().__init__()
    self.hass = hass
    self._proxied_camera = config.get(CONF_ENTITY_ID)

    name = config.get(CONF_NAME)
    if not name:
        name = "{} - {}".format(DEFAULT_BASENAME, self._proxied_camera)
    self._name = name

    # Options applied to still images.
    self._image_opts = ImageOpts(
        config.get(CONF_MAX_IMAGE_WIDTH),
        config.get(CONF_MAX_IMAGE_HEIGHT),
        config.get(CONF_IMAGE_LEFT),
        config.get(CONF_IMAGE_TOP),
        config.get(CONF_IMAGE_QUALITY),
        config.get(CONF_FORCE_RESIZE))

    # Options applied to the stream; resizing is always forced here.
    self._stream_opts = ImageOpts(
        config.get(CONF_MAX_STREAM_WIDTH),
        config.get(CONF_MAX_STREAM_HEIGHT),
        config.get(CONF_IMAGE_LEFT),
        config.get(CONF_IMAGE_TOP),
        config.get(CONF_STREAM_QUALITY),
        True)

    self._image_refresh_rate = config.get(CONF_IMAGE_REFRESH_RATE)
    # Caching is enabled by either an explicit refresh rate or the flag.
    self._cache_images = bool(
        config.get(CONF_IMAGE_REFRESH_RATE)
        or config.get(CONF_CACHE_IMAGES))
    # No image fetched yet; the timestamp starts at the UNIX epoch.
    self._last_image_time = dt_util.utc_from_timestamp(0)
    self._last_image = None
    self._mode = config.get(CONF_MODE)
def _update(self):
    """Get the clients from the device."""
    from pyunifi.controller import APIError
    try:
        clients = self._controller.get_clients()
    except APIError as ex:
        _LOGGER.error("Failed to scan clients: %s", ex)
        clients = []

    # my_unifi - to also track ap's, switches, etc
    try:
        clients.extend(self._controller.get_aps())
    except APIError as ex2:
        _LOGGER.error("Failed to scan aps: %s", ex2)

    # Filter clients to provided SSID list
    if self._ssid_filter:
        clients = [client for client in clients
                   if 'essid' in client
                   and client['essid'] in self._ssid_filter]

    # Keep only clients seen within the detection window.
    recent = {}
    for client in clients:
        last_seen = dt_util.utc_from_timestamp(float(client['last_seen']))
        if dt_util.utcnow() - last_seen < self._detection_time:
            recent[client['mac']] = client
    self._clients = recent
def get_time_until(departure_time=None):
    """Calculate the time between now and a train's departure time."""
    if departure_time is None:
        return 0

    departure = dt_util.utc_from_timestamp(int(departure_time))
    minutes = (departure - dt_util.now()).total_seconds() / 60
    return round(minutes)
def row_to_event(row):
    """Convert a database row to an event.

    Columns used: row[1] event type, row[2] JSON event data, row[3]
    origin, row[5] fired-at epoch timestamp.  Returns None when the
    JSON payload cannot be parsed.
    """
    # NOTE(review): row[4] is intentionally skipped here — confirm which
    # column it holds in the events schema.
    try:
        return Event(row[1], json.loads(row[2]), EventOrigin(row[3]),
                     date_util.utc_from_timestamp(row[5]))
    except ValueError:
        # When json.loads fails
        _LOGGER.exception("Error converting row to event: %s", row)
        return None
def update_period(self):
    """Parse the templates and store a datetime tuple in _period.

    Each template may render either an ISO datetime string or a numeric
    UNIX timestamp; a missing endpoint is derived from the configured
    duration.
    """
    start = None
    end = None

    # Parse start
    if self._start is not None:
        try:
            start_rendered = self._start.render()
        except (TemplateError, TypeError) as ex:
            HistoryStatsHelper.handle_template_exception(ex, 'start')
            return
        start = dt_util.parse_datetime(start_rendered)
        if start is None:
            try:
                # Not an ISO datetime: interpret the rendered value as a
                # UNIX timestamp, truncated to whole seconds.
                start = dt_util.as_local(dt_util.utc_from_timestamp(
                    math.floor(float(start_rendered))))
            except ValueError:
                _LOGGER.error('PARSING ERROR: start must be a datetime'
                              ' or a timestamp.')
                return

    # Parse end
    if self._end is not None:
        try:
            end_rendered = self._end.render()
        except (TemplateError, TypeError) as ex:
            HistoryStatsHelper.handle_template_exception(ex, 'end')
            return
        end = dt_util.parse_datetime(end_rendered)
        if end is None:
            try:
                end = dt_util.as_local(dt_util.utc_from_timestamp(
                    math.floor(float(end_rendered))))
            except ValueError:
                _LOGGER.error('PARSING ERROR: end must be a datetime'
                              ' or a timestamp.')
                return

    # Calculate start or end using the duration
    if start is None:
        start = end - self._duration
    if end is None:
        end = start + self._duration

    self._period = start, end
def __init__(self, point_client, device_id, device_class):
    """Initialize the entity."""
    self._async_unsub_dispatcher_connect = None
    self._client = point_client
    self._id = device_id
    self._device_class = device_class
    self._name = self.device.name
    # No update received yet: start at the UNIX epoch.
    self._updated = utc_from_timestamp(0)
    self._value = None
def _update(self):
    """Get the latest data from MagicSeaweed.

    Stores the first forecast in self.currently and the next eight
    forecasts in self.hourly keyed by local hour label.
    """
    try:
        forecasts = self._msw.get_future()
        self.currently = forecasts.data[0]
        for forecast in forecasts.data[:8]:
            # NOTE(review): "%-I" (unpadded hour) is a glibc strftime
            # extension and fails on Windows — confirm target platforms.
            hour = dt_util.utc_from_timestamp(
                forecast.localTimestamp).strftime("%-I%p")
            self.hourly[hour] = forecast
    except ConnectionError:
        # NOTE(review): this catches the builtin ConnectionError; if the
        # MSW client raises requests.exceptions.ConnectionError it would
        # not be caught here — verify which exception the client raises.
        _LOGGER.error("Unable to retrieve data from Magicseaweed")
def timestamp_custom(value, date_format=DATE_STR_FORMAT, local=True):
    """Filter to convert given timestamp to format."""
    try:
        converted = dt_util.utc_from_timestamp(value)
        result = dt_util.as_local(converted) if local else converted
        return result.strftime(date_format)
    except (ValueError, TypeError):
        # If timestamp can't be converted
        return value
def test_as_timestamp(self):
    """Test as_timestamp method."""
    ts = 1462401234
    utc_dt = dt_util.utc_from_timestamp(ts)
    # Round-trips from a datetime object and from its ISO string.
    assert dt_util.as_timestamp(utc_dt) == ts
    assert dt_util.as_timestamp(utc_dt.isoformat()) == ts

    # confirm the ability to handle a string passed in
    later = dt_util.as_timestamp("2016-01-01 12:12:12")
    earlier = dt_util.as_timestamp("2016-01-01 12:12:11")
    assert later - earlier == 1
def test_timezone_intervals(self):
    """Test date sensor behavior in a timezone besides UTC."""
    new_tz = dt_util.get_time_zone('America/New_York')
    assert new_tz is not None
    # NOTE(review): this changes the process-wide default timezone and
    # never restores it — it may leak into later tests; confirm a
    # fixture/teardown resets it.
    dt_util.set_default_time_zone(new_tz)

    device = time_date.TimeDateSensor(self.hass, 'date')
    now = dt_util.utc_from_timestamp(50000)
    next_time = device.get_next_interval(now)
    # start of local day in EST was 18000.0
    # so the second day was 18000 + 86400
    assert next_time.timestamp() == 104400
async def async_update(self):
    """Update the data."""
    await self._client.update_alarms(self._host)
    data = self.hass.data[GOOGLEHOME_DOMAIN][self._host]

    alarms = data.get('alarms')[self._condition]
    if not alarms:
        self._available = False
        return

    self._available = True
    # fire_time is in milliseconds; state is the earliest alarm as ISO.
    earliest_ms = min(alarm['fire_time'] for alarm in alarms)
    self._state = dt_util.utc_from_timestamp(earliest_ms / 1000).isoformat()
def _update(self):
    """Get the clients from the device."""
    from pyunifi.controller import APIError
    try:
        clients = self._controller.get_clients()
    except APIError as ex:
        _LOGGER.error("Failed to scan clients: %s", ex)
        clients = []

    # Keep only clients seen within the detection window.
    recent = {}
    for client in clients:
        last_seen = dt_util.utc_from_timestamp(float(client['last_seen']))
        if dt_util.utcnow() - last_seen < self._detection_time:
            recent[client['mac']] = client
    self._clients = recent
def test_states(self):
    """Test states of sensors."""
    now = dt_util.utc_from_timestamp(1495068856)
    # (sensor type, expected state at the fixed instant above)
    expected = [
        ('time', "00:54"),
        ('date', "2017-05-18"),
        ('time_utc', "00:54"),
        ('beat', "@079"),
    ]
    for sensor_type, state in expected:
        device = time_date.TimeDateSensor(self.hass, sensor_type)
        device._update_internal_state(now)
        assert device.state == state
def get_extra_attributes(self, device):
    """Return the extra attributes of the device."""
    if not self._monitored_conditions:
        return {}

    client = self._clients.get(device, {})
    attributes = {}
    for variable in self._monitored_conditions:
        if variable not in client:
            continue
        value = client[variable]
        # Timestamp-valued attributes are converted to UTC datetimes.
        if variable in TIMESTAMP_ATTRS:
            value = dt_util.utc_from_timestamp(float(value))
        attributes[variable] = value

    _LOGGER.debug("Device mac %s attributes %s", device, attributes)
    return attributes
def state_attributes(self):
    """Return the state attributes of the Vera device."""
    attr = {}
    if self.vera_device.has_battery:
        attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level + '%'

    if self.vera_device.is_armable:
        # refresh_value returns the raw Vera variable; it is compared
        # against the string '1' below.
        armed = self.vera_device.refresh_value('Armed')
        attr[ATTR_ARMED] = 'True' if armed == '1' else 'False'

    if self.vera_device.is_trippable:
        last_tripped = self.vera_device.refresh_value('LastTrip')
        if last_tripped is not None:
            # LastTrip is an epoch timestamp in seconds.
            utc_time = dt_util.utc_from_timestamp(int(last_tripped))
            attr[ATTR_LAST_TRIP_TIME] = dt_util.datetime_to_str(
                utc_time)
        else:
            attr[ATTR_LAST_TRIP_TIME] = None
        tripped = self.vera_device.refresh_value('Tripped')
        attr[ATTR_TRIPPED] = 'True' if tripped == '1' else 'False'

    attr['Vera Device Id'] = self.vera_device.vera_device_id
    return attr
def update(self):
    """ Gets the latest data from opendata.ch. """
    url = (_RESOURCE + 'connections?' +
           'from=' + self.start + '&' +
           'to=' + self.destination + '&' +
           'fields[]=connections/from/departureTimestamp/&' +
           'fields[]=connections/')
    response = get(url)
    connections = response.json()['connections'][:2]

    try:
        # Local departure-time strings for the next two connections.
        return [
            dt_util.datetime_to_time_str(
                dt_util.as_local(dt_util.utc_from_timestamp(
                    item['from']['departureTimestamp'])))
            for item in connections
        ]
    except KeyError:
        return ['n/a']
"platform": "yandex_transport", "stop_id": 9639579, "routes": ROUTES, "name": NAME, } } FILTERED_ATTRS = { "т36": ["18:25", "18:42", "18:46"], "т47": ["18:35", "18:37", "18:40", "18:42"], "м10": ["18:20", "18:27", "18:29", "18:41", "18:43"], "stop_name": "7-й автобусный парк", "attribution": "Data provided by maps.yandex.ru", } RESULT_STATE = dt_util.utc_from_timestamp(1583421540).isoformat( timespec="seconds") async def assert_setup_sensor(hass, config, count=1): """Set up the sensor and assert it's been created.""" with assert_setup_component(count): assert await async_setup_component(hass, sensor.DOMAIN, config) await hass.async_block_till_done() async def test_setup_platform_valid_config(hass, mock_requester): """Test that sensor is set up properly with valid config.""" await assert_setup_sensor(hass, TEST_CONFIG) async def test_setup_platform_invalid_config(hass, mock_requester):
def test_utc_from_timestamp(self):
    """Test utc_from_timestamp method."""
    expected = datetime(1986, 7, 9, tzinfo=dt_util.UTC)
    self.assertEqual(expected, dt_util.utc_from_timestamp(521251200))
def update(self):
    """Get the latest system information.

    Dispatches on self.type and stores the result in self._state.
    self.argument carries the per-type parameter (mount point,
    process name, or network interface name).
    """
    import psutil
    if self.type == 'disk_use_percent':
        self._state = psutil.disk_usage(self.argument).percent
    elif self.type == 'disk_use':
        # Bytes -> GiB, one decimal place.
        self._state = round(psutil.disk_usage(self.argument).used /
                            1024**3, 1)
    elif self.type == 'disk_free':
        self._state = round(psutil.disk_usage(self.argument).free /
                            1024**3, 1)
    elif self.type == 'memory_use_percent':
        self._state = psutil.virtual_memory().percent
    elif self.type == 'memory_use':
        # "Used" is total minus available, reported in MiB.
        self._state = round((psutil.virtual_memory().total -
                             psutil.virtual_memory().available) /
                            1024**2, 1)
    elif self.type == 'memory_free':
        self._state = round(psutil.virtual_memory().available / 1024**2, 1)
    elif self.type == 'swap_use_percent':
        self._state = psutil.swap_memory().percent
    elif self.type == 'swap_use':
        self._state = round(psutil.swap_memory().used / 1024**3, 1)
    elif self.type == 'swap_free':
        self._state = round(psutil.swap_memory().free / 1024**3, 1)
    elif self.type == 'processor_use':
        # interval=None is non-blocking: CPU percent since the last call.
        self._state = round(psutil.cpu_percent(interval=None))
    elif self.type == 'process':
        # Exact name match; a process may vanish while iterating, so
        # NoSuchProcess is logged and the scan continues.
        for proc in psutil.process_iter():
            try:
                if self.argument == proc.name():
                    self._state = STATE_ON
                    return
            except psutil.NoSuchProcess as err:
                _LOGGER.warning(
                    "Failed to load process with id: %s, old name: %s",
                    err.pid, err.name)
        self._state = STATE_OFF
    elif self.type == 'network_out' or self.type == 'network_in':
        counters = psutil.net_io_counters(pernic=True)
        if self.argument in counters:
            # IO_COUNTER maps the sensor type to the tuple index; MiB.
            counter = counters[self.argument][IO_COUNTER[self.type]]
            self._state = round(counter / 1024**2, 1)
        else:
            self._state = STATE_UNKNOWN
    elif self.type == 'packets_out' or self.type == 'packets_in':
        counters = psutil.net_io_counters(pernic=True)
        if self.argument in counters:
            self._state = counters[self.argument][IO_COUNTER[self.type]]
        else:
            self._state = STATE_UNKNOWN
    elif self.type == 'ipv4_address' or self.type == 'ipv6_address':
        addresses = psutil.net_if_addrs()
        if self.argument in addresses:
            # IF_ADDRS selects the address-family entry for the interface.
            self._state = addresses[self.argument][IF_ADDRS[self.type]][1]
        else:
            self._state = STATE_UNKNOWN
    elif self.type == 'last_boot':
        # Boot instant rendered as a local ISO date (no time component).
        self._state = dt_util.as_local(
            dt_util.utc_from_timestamp(psutil.boot_time())
        ).date().isoformat()
    elif self.type == 'since_last_boot':
        # Kept as a timedelta object.
        self._state = dt_util.utcnow() - dt_util.utc_from_timestamp(
            psutil.boot_time())
    elif self.type == 'load_1m':
        self._state = os.getloadavg()[0]
    elif self.type == 'load_5m':
        self._state = os.getloadavg()[1]
    elif self.type == 'load_15m':
        self._state = os.getloadavg()[2]
"platform": "yandex_transport", "stop_id": 9639579, "routes": ROUTES, "name": NAME, } } FILTERED_ATTRS = { "т36": ["16:10", "16:17", "16:26"], "т47": ["16:09", "16:10"], "м10": ["16:12", "16:20"], "stop_name": "7-й автобусный парк", "attribution": "Data provided by maps.yandex.ru", } RESULT_STATE = dt_util.utc_from_timestamp(1570972183).isoformat(timespec="seconds") async def assert_setup_sensor(hass, config, count=1): """Set up the sensor and assert it's been created.""" with assert_setup_component(count): assert await async_setup_component(hass, sensor.DOMAIN, config) async def test_setup_platform_valid_config(hass, mock_requester): """Test that sensor is set up properly with valid config.""" await assert_setup_sensor(hass, TEST_CONFIG) async def test_setup_platform_invalid_config(hass, mock_requester): """Check an invalid configuration."""
def _get_file_mtime(self, event) -> Optional[datetime]:
    """Return the event image's modification time, or None if unreadable."""
    image_path = self._controller.history_image_path(event)
    try:
        mtime = path.getmtime(image_path)
    except OSError:
        return None
    return dt_util.utc_from_timestamp(mtime)
async def async_update(self) -> None:
    """Update an entity's state data."""
    device_data = self._device.data
    if "_state" not in device_data:
        return
    # The "_state" payload is only delivered via the v3 API.
    self._last_comms = dt_util.utc_from_timestamp(
        device_data["_state"]["lastComms"])
def _async_device_as_dict(hass: HomeAssistant, device: TuyaDevice) -> dict[str, Any]:
    """Represent a Tuya device as a dictionary.

    Builds a diagnostics payload: raw device metadata, decoded status /
    function / status-range maps, plus how the device and its entities
    are registered in Home Assistant. Sensitive values are redacted.
    """
    # Base device information, without sensitive information.
    data = {
        # NOTE(review): "name" is populated from device.model — confirm
        # this is intentional (possibly to avoid leaking a user-chosen
        # device name).
        "name": device.model,
        "model": device.model,
        "category": device.category,
        "product_id": device.product_id,
        "product_name": device.product_name,
        "online": device.online,
        "sub": device.sub,
        "time_zone": device.time_zone,
        # Epoch seconds -> ISO 8601 UTC strings.
        "active_time": dt_util.utc_from_timestamp(device.active_time).isoformat(),
        "create_time": dt_util.utc_from_timestamp(device.create_time).isoformat(),
        "update_time": dt_util.utc_from_timestamp(device.update_time).isoformat(),
        "function": {},
        "status_range": {},
        "status": {},
        "home_assistant": {},
    }

    # Gather Tuya states
    for dpcode, value in device.status.items():
        # These statuses may contain sensitive information, redact these..
        if dpcode in {DPCode.ALARM_MESSAGE, DPCode.MOVEMENT_DETECT_PIC}:
            data["status"][dpcode] = REDACTED
            continue
        # Values may be JSON-encoded strings; decode when possible.
        with suppress(ValueError, TypeError):
            value = json.loads(value)
        data["status"][dpcode] = value

    # Gather Tuya functions
    for function in device.function.values():
        value = function.values
        with suppress(ValueError, TypeError, AttributeError):
            value = json.loads(cast(str, function.values))
        data["function"][function.code] = {
            "type": function.type,
            "value": value,
        }

    # Gather Tuya status ranges
    for status_range in device.status_range.values():
        value = status_range.values
        with suppress(ValueError, TypeError, AttributeError):
            value = json.loads(status_range.values)
        data["status_range"][status_range.code] = {
            "type": status_range.type,
            "value": value,
        }

    # Gather information how this Tuya device is represented in Home Assistant
    device_registry = dr.async_get(hass)
    entity_registry = er.async_get(hass)
    hass_device = device_registry.async_get_device(identifiers={(DOMAIN, device.id)})
    if hass_device:
        data["home_assistant"] = {
            "name": hass_device.name,
            "name_by_user": hass_device.name_by_user,
            "disabled": hass_device.disabled,
            "disabled_by": hass_device.disabled_by,
            "entities": [],
        }

        hass_entities = er.async_entries_for_device(
            entity_registry,
            device_id=hass_device.id,
            include_disabled_entities=True,
        )

        for entity_entry in hass_entities:
            state = hass.states.get(entity_entry.entity_id)
            state_dict = None
            if state:
                state_dict = state.as_dict()

                # Redact the `entity_picture` attribute as it contains a token.
                if "entity_picture" in state_dict["attributes"]:
                    state_dict["attributes"] = {
                        **state_dict["attributes"],
                        "entity_picture": REDACTED,
                    }

                # The context doesn't provide useful information in this case.
                state_dict.pop("context", None)

            data["home_assistant"]["entities"].append({
                "disabled": entity_entry.disabled,
                "disabled_by": entity_entry.disabled_by,
                "entity_category": entity_entry.entity_category,
                "device_class": entity_entry.device_class,
                "original_device_class": entity_entry.original_device_class,
                "icon": entity_entry.icon,
                "original_icon": entity_entry.original_icon,
                "unit_of_measurement": entity_entry.unit_of_measurement,
                "state": state_dict,
            })

    return data
OVERVIEW_UPDATE_DELAY = timedelta(minutes=15) DETAILS_UPDATE_DELAY = timedelta(hours=12) INVENTORY_UPDATE_DELAY = timedelta(hours=12) POWER_FLOW_UPDATE_DELAY = timedelta(minutes=15) ENERGY_DETAILS_DELAY = timedelta(minutes=15) SCAN_INTERVAL = timedelta(minutes=15) # Supported overview sensors SENSOR_TYPES = [ SolarEdgeSensor( key="lifetime_energy", json_key="lifeTimeData", name="Lifetime energy", icon="mdi:solar-power", last_reset=dt_util.utc_from_timestamp(0), state_class=STATE_CLASS_MEASUREMENT, unit_of_measurement=ENERGY_WATT_HOUR, ), SolarEdgeSensor( key="energy_this_year", json_key="lastYearData", name="Energy this year", entity_registry_enabled_default=False, icon="mdi:solar-power", unit_of_measurement=ENERGY_WATT_HOUR, ), SolarEdgeSensor( key="energy_this_month", json_key="lastMonthData", name="Energy this month",
def _update_period(self):  # pylint: disable=r0912
    """Parse the templates and calculate a datetime tuples.

    Each template may render an ISO datetime string or a numeric UNIX
    timestamp; a missing endpoint is derived from the configured
    duration, and the period is clamped so it never reaches into the
    future. Stores the tuple in self._period and ISO strings in
    self.start / self.end.
    """
    start = end = None
    now = dt_util.now()

    # Parse start
    _LOGGER.debug("Process start template: %s", self._start_template)
    if self._start_template is not None:
        try:
            start_rendered = self._start_template.render()
        except (TemplateError, TypeError) as ex:
            self.handle_template_exception(ex, "start")
            return
        if isinstance(start_rendered, str):
            start = dt_util.parse_datetime(start_rendered)
        if start is None:
            try:
                # Fall back to a numeric UNIX timestamp (whole seconds).
                start = dt_util.as_local(
                    dt_util.utc_from_timestamp(
                        math.floor(float(start_rendered))))
            except ValueError:
                # BUGFIX: the adjacent string literals were missing a
                # separating space ("...a datetimeor a timestamp").
                _LOGGER.error("Parsing error: start must be a datetime "
                              "or a timestamp")
                return

    # Parse end
    _LOGGER.debug("Process end template: %s", self._end_template)
    if self._end_template is not None:
        try:
            end_rendered = self._end_template.render()
        except (TemplateError, TypeError) as ex:
            self.handle_template_exception(ex, "end")
            return
        if isinstance(end_rendered, str):
            end = dt_util.parse_datetime(end_rendered)
        if end is None:
            try:
                end = dt_util.as_local(
                    dt_util.utc_from_timestamp(
                        math.floor(float(end_rendered))))
            except ValueError:
                _LOGGER.error("Parsing error: end must be a datetime "
                              "or a timestamp")
                return

    # Calculate start or end using the duration
    _LOGGER.debug("Process duration: %s", self._duration)
    if self._duration is not None:
        if start is None:
            if end is None:
                end = now
            start = end - self._duration
        else:
            end = start + self._duration

    _LOGGER.debug("Start: %s, End: %s", start, end)
    if start is None or end is None:
        return

    if start > now:
        # History hasn't been written yet for this period
        return
    if now < end:
        # No point in making stats of the future
        end = now

    self._period = start, end
    self.start = start.replace(microsecond=0).isoformat()
    self.end = end.replace(microsecond=0).isoformat()
def ts_to_dt(timestamp):
    """Turn a UNIX timestamp from the DB into a UTC datetime.

    Returns None when the stored value is None.  (The previous docstring
    described the inverse conversion, datetime -> integer.)
    """
    if timestamp is None:
        return None
    return dt_util.utc_from_timestamp(timestamp)
def test_utc_from_timestamp():
    """Test utc_from_timestamp method."""
    # 521251200 seconds after the epoch is midnight 1986-07-09 UTC.
    expected = datetime(1986, 7, 9, tzinfo=dt_util.UTC)
    assert dt_util.utc_from_timestamp(521251200) == expected
def media_position_updated_at(self) -> dt.datetime | None:
    """When was the position of the current playing media valid."""
    # No playback info -> position timestamp is meaningless.
    if self._currently_playing:
        # The API reports milliseconds; convert to seconds for datetime.
        return utc_from_timestamp(self._currently_playing["timestamp"] / 1000)
    return None
def ts_to_dt(timestamp: Optional[float]) -> Optional[datetime]:
    """Turn a UNIX timestamp from the DB into a UTC datetime.

    Returns None when the stored value is None.  (The previous docstring
    described the inverse conversion, datetime -> integer.)
    """
    if timestamp is None:
        return None
    return dt_util.utc_from_timestamp(timestamp)
def _update(  # noqa: C901
    type_: str, data: SensorData
) -> tuple[str | None, str | None, datetime.datetime | None]:
    """Get the latest system information.

    Dispatches on the sensor ``type_`` and returns a
    ``(state, value, update_time)`` tuple; ``value``/``update_time`` are
    only populated by the throughput sensors, which need the previous
    counter sample to compute a rate.
    """
    state = None
    value = None
    update_time = None

    # Disk sensors: usage reported in GiB (1024**3) or percent.
    if type_ == "disk_use_percent":
        state = _disk_usage(data.argument).percent
    elif type_ == "disk_use":
        state = round(_disk_usage(data.argument).used / 1024**3, 1)
    elif type_ == "disk_free":
        state = round(_disk_usage(data.argument).free / 1024**3, 1)
    # Memory/swap sensors: reported in MiB (1024**2) or percent.
    elif type_ == "memory_use_percent":
        state = _virtual_memory().percent
    elif type_ == "memory_use":
        virtual_memory = _virtual_memory()
        state = round(
            (virtual_memory.total - virtual_memory.available) / 1024**2, 1)
    elif type_ == "memory_free":
        state = round(_virtual_memory().available / 1024**2, 1)
    elif type_ == "swap_use_percent":
        state = _swap_memory().percent
    elif type_ == "swap_use":
        state = round(_swap_memory().used / 1024**2, 1)
    elif type_ == "swap_free":
        state = round(_swap_memory().free / 1024**2, 1)
    elif type_ == "processor_use":
        state = round(psutil.cpu_percent(interval=None))
    elif type_ == "processor_temperature":
        state = _read_cpu_temperature()
    elif type_ == "process":
        # ON if any running process name matches the configured argument.
        state = STATE_OFF
        for proc in psutil.process_iter():
            try:
                if data.argument == proc.name():
                    state = STATE_ON
                    break
            except psutil.NoSuchProcess as err:
                # Process exited between iteration and name() lookup.
                _LOGGER.warning(
                    "Failed to load process with ID: %s, old name: %s",
                    err.pid,
                    err.name,
                )
    elif type_ in ("network_out", "network_in"):
        counters = _net_io_counters()
        if data.argument in counters:
            counter = counters[data.argument][IO_COUNTER[type_]]
            state = round(counter / 1024**2, 1)
        else:
            state = None
    elif type_ in ("packets_out", "packets_in"):
        counters = _net_io_counters()
        if data.argument in counters:
            state = counters[data.argument][IO_COUNTER[type_]]
        else:
            state = None
    elif type_ in ("throughput_network_out", "throughput_network_in"):
        counters = _net_io_counters()
        if data.argument in counters:
            counter = counters[data.argument][IO_COUNTER[type_]]
            now = dt_util.utcnow()
            # Rate needs a previous sample that is smaller than the current
            # counter (counters reset on reboot/interface reset).
            if data.value and data.value < counter:
                state = round(
                    (counter - data.value)
                    / 1000**2
                    / (now - (data.update_time or now)).total_seconds(),
                    3,
                )
            else:
                state = None
            # Remember this sample for the next rate computation.
            update_time = now
            value = counter
        else:
            state = None
    elif type_ in ("ipv4_address", "ipv6_address"):
        addresses = _net_if_addrs()
        if data.argument in addresses:
            # NOTE(review): no break — when an interface has several
            # addresses of the family, the last one wins.
            for addr in addresses[data.argument]:
                if addr.family == IF_ADDRS_FAMILY[type_]:
                    state = addr.address
        else:
            state = None
    elif type_ == "last_boot":
        # Only update on initial setup
        if data.state is None:
            state = dt_util.utc_from_timestamp(psutil.boot_time()).isoformat()
        else:
            state = data.state
    elif type_ == "load_1m":
        state = round(_getloadavg()[0], 2)
    elif type_ == "load_5m":
        state = round(_getloadavg()[1], 2)
    elif type_ == "load_15m":
        state = round(_getloadavg()[2], 2)

    return state, value, update_time
"gas_daily_cost": { ATTR_NAME: "Gas Cost Today", ATTR_SECTION: "gas_usage", ATTR_MEASUREMENT: "day_cost", ATTR_UNIT_OF_MEASUREMENT: CURRENCY_EUR, ATTR_ICON: "mdi:gas-cylinder", }, "gas_meter_reading": { ATTR_NAME: "Gas Meter", ATTR_SECTION: "gas_usage", ATTR_MEASUREMENT: "meter", ATTR_UNIT_OF_MEASUREMENT: VOLUME_CUBIC_METERS, ATTR_ICON: "mdi:gas-cylinder", ATTR_STATE_CLASS: STATE_CLASS_MEASUREMENT, ATTR_DEVICE_CLASS: DEVICE_CLASS_GAS, ATTR_LAST_RESET: dt_util.utc_from_timestamp(0), ATTR_DEFAULT_ENABLED: False, }, "gas_value": { ATTR_NAME: "Current Gas Usage", ATTR_SECTION: "gas_usage", ATTR_MEASUREMENT: "current", ATTR_UNIT_OF_MEASUREMENT: VOLUME_CM3, ATTR_ICON: "mdi:gas-cylinder", }, "power_average": { ATTR_NAME: "Average Power Usage", ATTR_SECTION: "power_usage", ATTR_MEASUREMENT: "average", ATTR_UNIT_OF_MEASUREMENT: POWER_WATT, ATTR_DEVICE_CLASS: DEVICE_CLASS_POWER,
def get_ride_duration(departure_time, arrival_time, delay=0):
    """Calculate the total travel time in minutes, including any delay."""
    depart = dt_util.utc_from_timestamp(int(departure_time))
    arrive = dt_util.utc_from_timestamp(int(arrival_time))
    ride_minutes = int(round((arrive - depart).total_seconds() / 60))
    return ride_minutes + get_delay_in_minutes(delay)
def _update_info(self, entity_id, old_state, new_state, init=False):
    """Merge a state change from one watched entity into this tracker.

    Validates the source entity's data (GPS vs non-GPS), decides whether
    it is newer than what we already have, and forwards the combined
    result via ``self._see``.
    """
    if new_state is None:
        return

    with self._lock:
        # Get time device was last seen, which is the entity's last_seen
        # attribute, or if that doesn't exist, then last_updated from the
        # new state object. Make sure last_seen is timezone aware in UTC.
        # Note that dt_util.as_utc assumes naive datetime is in local
        # timezone.
        last_seen = new_state.attributes.get(ATTR_LAST_SEEN)
        if isinstance(last_seen, datetime):
            last_seen = dt_util.as_utc(last_seen)
        else:
            # Attribute may be a raw epoch value or missing entirely.
            try:
                last_seen = dt_util.utc_from_timestamp(float(last_seen))
            except (TypeError, ValueError):
                last_seen = new_state.last_updated

        # Is this newer info than last update?
        if self._prev_seen and last_seen <= self._prev_seen:
            _LOGGER.debug(
                'For {} skipping update from {}: '
                'last_seen not newer than previous update ({} <= {})'.
                format(self._entity_id, entity_id, last_seen,
                       self._prev_seen))
            return

        # Try to get GPS and battery data.
        try:
            gps = (new_state.attributes[ATTR_LATITUDE],
                   new_state.attributes[ATTR_LONGITUDE])
        except KeyError:
            gps = None
        gps_accuracy = new_state.attributes.get(ATTR_GPS_ACCURACY)
        battery = new_state.attributes.get(
            ATTR_BATTERY, new_state.attributes.get(ATTR_BATTERY_LEVEL))
        charging = new_state.attributes.get(
            ATTR_BATTERY_CHARGING, new_state.attributes.get(ATTR_CHARGING))
        # Don't use location_name unless we have to.
        location_name = None
        # What type of tracker is this?
        if new_state.domain == BS_DOMAIN:
            source_type = SOURCE_TYPE_BINARY_SENSOR
        else:
            source_type = new_state.attributes.get(ATTR_SOURCE_TYPE)

        state = new_state.state

        if source_type == SOURCE_TYPE_GPS:
            # GPS coordinates and accuracy are required.
            if gps is None:
                self._bad_entity(entity_id, 'missing gps attributes', init)
                return
            if gps_accuracy is None:
                self._bad_entity(
                    entity_id, 'missing gps_accuracy attribute', init)
                return
            self._good_entity(entity_id, SOURCE_TYPE_GPS, state)
        elif source_type in SOURCE_TYPE_NON_GPS:
            # Convert 'on'/'off' state of binary_sensor
            # to 'home'/'not_home'.
            if source_type == SOURCE_TYPE_BINARY_SENSOR:
                if state == STATE_BINARY_SENSOR_HOME:
                    state = STATE_HOME
                else:
                    state = STATE_NOT_HOME
            self._good_entity(entity_id, source_type, state)

            if not self._use_non_gps_data(state):
                return

            # Don't use new GPS data if it's not complete.
            if gps is None or gps_accuracy is None:
                gps = gps_accuracy = None

            # Get current GPS data, if any, and determine if it is in
            # 'zone.home'.
            cur_state = self._hass.states.get(self._entity_id)
            try:
                cur_lat = cur_state.attributes[ATTR_LATITUDE]
                cur_lon = cur_state.attributes[ATTR_LONGITUDE]
                cur_acc = cur_state.attributes[ATTR_GPS_ACCURACY]
                cur_gps_is_home = (
                    active_zone(
                        self._hass, cur_lat, cur_lon, cur_acc
                    ).entity_id == ENTITY_ID_HOME)
            except (AttributeError, KeyError):
                # No current state, no GPS attrs, or not in any zone.
                cur_gps_is_home = False

            # It's important, for this composite tracker, to avoid the
            # component level code's "stale processing." This can be done
            # one of two ways: 1) provide GPS data w/ source_type of gps,
            # or 2) provide a location_name (that will be used as the new
            # state.)
            # If router entity's state is 'home' and current GPS data from
            # composite entity is available and is in 'zone.home',
            # use it and make source_type gps.
            if state == STATE_HOME and cur_gps_is_home:
                gps = cur_lat, cur_lon
                gps_accuracy = cur_acc
                source_type = SOURCE_TYPE_GPS
            # Otherwise, if new GPS data is valid (which is unlikely if
            # new state is not 'home'),
            # use it and make source_type gps.
            elif gps:
                source_type = SOURCE_TYPE_GPS
            # Otherwise, don't use any GPS data, but set location_name to
            # new state.
            else:
                location_name = state
        else:
            self._bad_entity(
                entity_id,
                'unsupported source_type: {}'.format(source_type),
                init)
            return

        tz = None
        if self._time_as in [TZ_DEVICE_UTC, TZ_DEVICE_LOCAL]:
            tzname = None
            if gps:
                # timezone_at will return a string or None.
                tzname = self._tf.timezone_at(lng=gps[1], lat=gps[0])
                # get_time_zone will return a tzinfo or None.
                tz = dt_util.get_time_zone(tzname)
            attrs = {ATTR_TIME_ZONE: tzname or STATE_UNKNOWN}
        else:
            attrs = {}
        attrs.update({
            ATTR_ENTITY_ID: tuple(
                entity_id for entity_id, entity in self._entities.items()
                if entity[ATTR_SOURCE_TYPE] is not None),
            ATTR_LAST_ENTITY_ID: entity_id,
            ATTR_LAST_SEEN:
                self._dt_attr_from_utc(last_seen.replace(microsecond=0), tz)
        })
        if charging is not None:
            attrs[ATTR_BATTERY_CHARGING] = charging
        self._see(dev_id=self._dev_id, location_name=location_name,
                  gps=gps, gps_accuracy=gps_accuracy, battery=battery,
                  attributes=attrs, source_type=source_type)

        # Remember the newest accepted sample for the staleness check above.
        self._prev_seen = last_seen
async def test_timestamp(hass):
    """Test timestamp handling of input_datetime initial values.

    Runs with the default timezone forced to America/Los_Angeles and
    restores the original timezone in the finally block, so later tests
    are unaffected.
    """
    try:
        dt_util.set_default_time_zone(dt_util.get_time_zone("America/Los_Angeles"))

        assert await async_setup_component(
            hass,
            DOMAIN,
            {
                DOMAIN: {
                    "test_datetime_initial_with_tz": {
                        "has_time": True,
                        "has_date": True,
                        "initial": "2020-12-13 10:00:00+01:00",
                    },
                    "test_datetime_initial_without_tz": {
                        "has_time": True,
                        "has_date": True,
                        "initial": "2020-12-13 10:00:00",
                    },
                    "test_time_initial": {
                        "has_time": True,
                        "has_date": False,
                        "initial": "10:00:00",
                    },
                }
            },
        )

        # initial has been converted to the set timezone
        state_with_tz = hass.states.get(
            "input_datetime.test_datetime_initial_with_tz")
        assert state_with_tz is not None
        # Timezone LA is UTC-8 => timestamp carries +01:00 =>
        # delta is -9 => 10:00 - 09:00 => 01:00
        assert state_with_tz.state == "2020-12-13 01:00:00"
        assert (
            dt_util.as_local(
                dt_util.utc_from_timestamp(
                    state_with_tz.attributes[ATTR_TIMESTAMP])
            ).strftime(FMT_DATETIME)
            == "2020-12-13 01:00:00"
        )

        # initial has been interpreted as being part of set timezone
        state_without_tz = hass.states.get(
            "input_datetime.test_datetime_initial_without_tz"
        )
        assert state_without_tz is not None
        assert state_without_tz.state == "2020-12-13 10:00:00"
        # Timezone LA is UTC-8 => timestamp has no zone (= assumed local)
        # => delta to UTC is +8 => 10:00 + 08:00 => 18:00
        assert (
            dt_util.utc_from_timestamp(
                state_without_tz.attributes[ATTR_TIMESTAMP]
            ).strftime(FMT_DATETIME)
            == "2020-12-13 18:00:00"
        )
        assert (
            dt_util.as_local(
                dt_util.utc_from_timestamp(
                    state_without_tz.attributes[ATTR_TIMESTAMP])
            ).strftime(FMT_DATETIME)
            == "2020-12-13 10:00:00"
        )

        # Use datetime.datetime.fromtimestamp — cross-check the dt_util
        # result against the stdlib conversion.
        assert (
            dt_util.as_local(
                datetime.datetime.fromtimestamp(
                    state_without_tz.attributes[ATTR_TIMESTAMP],
                    datetime.timezone.utc
                )
            ).strftime(FMT_DATETIME)
            == "2020-12-13 10:00:00"
        )

        # Test initial time sets timestamp correctly.
        state_time = hass.states.get("input_datetime.test_time_initial")
        assert state_time is not None
        assert state_time.state == "10:00:00"
        assert state_time.attributes[ATTR_TIMESTAMP] == 10 * 60 * 60

        # Test that setting the timestamp of an entity works.
        await hass.services.async_call(
            DOMAIN,
            "set_datetime",
            {
                ATTR_ENTITY_ID: "input_datetime.test_datetime_initial_with_tz",
                ATTR_TIMESTAMP: state_without_tz.attributes[ATTR_TIMESTAMP],
            },
            blocking=True,
        )
        state_with_tz_updated = hass.states.get(
            "input_datetime.test_datetime_initial_with_tz"
        )
        assert state_with_tz_updated.state == "2020-12-13 10:00:00"
        assert (
            state_with_tz_updated.attributes[ATTR_TIMESTAMP]
            == state_without_tz.attributes[ATTR_TIMESTAMP]
        )
    finally:
        # Always restore the original timezone for subsequent tests.
        dt_util.set_default_time_zone(ORIG_TIMEZONE)
def epoch_to_utc(self):
    """Return the sensor's motion-state timestamp as a UTC datetime.

    ``motion_state_ts`` is an epoch value in milliseconds — presumably an
    integer (TODO confirm against the sensor API).  The old implementation
    dropped the millisecond digits by slicing the string representation,
    which raises ValueError for values under 1000 ms; integer division by
    1000 performs the same truncation safely.
    """
    return dt_util.utc_from_timestamp(
        float(int(self.__sensor.motion_state_ts) // 1000))
def last_updated(self):
    """Return the last-update moment as a UTC datetime, if known."""
    # Anything other than an int epoch value means "unknown" -> None.
    if not isinstance(self._last_updated, int):
        return None
    return utc_from_timestamp(self._last_updated)
def state(self) -> datetime.datetime | None:
    """Return the next occurrence as a local datetime.

    ``self._next[self._sensor_property]`` is an offset from now in
    milliseconds — TODO confirm against the data source.  Returns None
    (not the string ``'None'``, which the old code produced despite its
    ``datetime`` annotation) when no next occurrence is known.
    """
    if not self._next:
        return None
    return dt.as_local(
        dt.utc_from_timestamp(
            dt.utcnow().timestamp()
            + self._next[self._sensor_property] / 1000
        )
    )
class Life360DataUpdateCoordinator(DataUpdateCoordinator[Life360Data]):
    """Life360 data update coordinator."""

    config_entry: ConfigEntry

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        """Initialize data update coordinator."""
        super().__init__(
            hass,
            LOGGER,
            name=f"{DOMAIN} ({entry.unique_id})",
            update_interval=UPDATE_INTERVAL,
        )
        self._hass = hass
        self._api = Life360(
            timeout=COMM_TIMEOUT,
            max_retries=COMM_MAX_RETRIES,
            authorization=entry.data[CONF_AUTHORIZATION],
        )
        # Shared per-member record of why location data was missing,
        # so repeated errors are only logged once per reason.
        self._missing_loc_reason = hass.data[DOMAIN].missing_loc_reason

    async def _retrieve_data(self, func: str, *args: Any) -> list[dict[str, Any]]:
        """Get data from Life360 by calling API method *func* in an executor.

        Raises ConfigEntryAuthFailed on login errors and UpdateFailed on
        any other Life360 error.
        """
        try:
            return await self._hass.async_add_executor_job(
                getattr(self._api, func), *args
            )
        except LoginError as exc:
            LOGGER.debug("Login error: %s", exc)
            raise ConfigEntryAuthFailed from exc
        except Life360Error as exc:
            LOGGER.debug("%s: %s", exc.__class__.__name__, exc)
            raise UpdateFailed from exc

    async def _async_update_data(self) -> Life360Data:
        """Get & process data from Life360."""
        data = Life360Data()

        for circle in await self._retrieve_data("get_circles"):
            circle_id = circle["id"]
            circle_members = await self._retrieve_data(
                "get_circle_members", circle_id)
            circle_places = await self._retrieve_data(
                "get_circle_places", circle_id)

            data.circles[circle_id] = Life360Circle(
                circle["name"],
                {
                    place["id"]: Life360Place(
                        place["name"],
                        float(place["latitude"]),
                        float(place["longitude"]),
                        float(place["radius"]),
                    )
                    for place in circle_places
                },
            )

            for member in circle_members:
                # Member isn't sharing location.
                if not int(member["features"]["shareLocation"]):
                    continue

                member_id = member["id"]

                first = member["firstName"]
                last = member["lastName"]
                if first and last:
                    name = " ".join([first, last])
                else:
                    name = first or last

                cur_missing_reason = self._missing_loc_reason.get(member_id)

                # Check if location information is missing. This can happen
                # if server has not heard from member's device in a long
                # time (e.g., has been off for a long time, or has lost
                # service, etc.)
                if loc := member["location"]:
                    with suppress(KeyError):
                        del self._missing_loc_reason[member_id]
                else:
                    if explicit_reason := member["issues"]["title"]:
                        if extended_reason := member["issues"]["dialog"]:
                            explicit_reason += f": {extended_reason}"
                    # Note that different Circles can report missing
                    # location in different ways. E.g., one might report an
                    # explicit reason and another does not. If a vague
                    # reason has already been logged but a more explicit
                    # reason is now available, log that, too.
                    if (
                        cur_missing_reason is None
                        or cur_missing_reason
                        == MissingLocReason.VAGUE_ERROR_REASON
                        and explicit_reason
                    ):
                        if explicit_reason:
                            self._missing_loc_reason[
                                member_id
                            ] = MissingLocReason.EXPLICIT_ERROR_REASON
                            err_msg = explicit_reason
                        else:
                            self._missing_loc_reason[
                                member_id
                            ] = MissingLocReason.VAGUE_ERROR_REASON
                            err_msg = "Location information missing"
                        LOGGER.error("%s: %s", name, err_msg)
                    continue

                # Note that member may be in more than one circle. If that's
                # the case just go ahead and process the newly retrieved
                # data (overwriting the older data), since it might be
                # slightly newer than what was retrieved while processing
                # another circle.
                place = loc["name"] or None

                if place:
                    address: str | None = place
                else:
                    address1 = loc["address1"] or None
                    address2 = loc["address2"] or None
                    if address1 and address2:
                        address = ", ".join([address1, address2])
                    else:
                        address = address1 or address2

                # Speed is clamped to non-negative; API value appears to be
                # in mph after scaling — NOTE(review): confirm units.
                speed = max(0, float(loc["speed"]) * SPEED_FACTOR_MPH)
                if self._hass.config.units.is_metric:
                    speed = convert(speed, LENGTH_MILES, LENGTH_KILOMETERS)

                data.members[member_id] = Life360Member(
                    address,
                    dt_util.utc_from_timestamp(int(loc["since"])),
                    bool(int(loc["charge"])),
                    int(float(loc["battery"])),
                    bool(int(loc["isDriving"])),
                    member["avatar"],
                    # Life360 reports accuracy in feet, but Device Tracker
                    # expects gps_accuracy in meters.
                    round(convert(
                        float(loc["accuracy"]), LENGTH_FEET, LENGTH_METERS)),
                    dt_util.utc_from_timestamp(int(loc["timestamp"])),
                    float(loc["latitude"]),
                    float(loc["longitude"]),
                    name,
                    place,
                    round(speed, SPEED_DIGITS),
                    bool(int(loc["wifiState"])),
                )
def utc_from_ts(val):
    """Convert *val* (anything ``float()``-accepts) to a UTC datetime.

    Returns None when *val* is None or not numeric.
    """
    try:
        return dt_util.utc_from_timestamp(float(val))
    except (TypeError, ValueError):
        # None or a non-numeric string -> no timestamp available.
        return None
def last_updated(self):
    """Return the last-update moment as a UTC datetime, if known."""
    # The backend stores the sentinel string 'unknown' when no update
    # time is available; in that case return None implicitly-equivalent.
    if self._last_updated == 'unknown':
        return None
    # Value arrives as a numeric string; go via float to tolerate
    # fractional seconds, then truncate to whole seconds.
    return utc_from_timestamp(int(float(self._last_updated)))
def forecast(self) -> List:
    """Return the daily forecast entries, or None when data is unavailable."""
    fcst_data = self.fcst_coordinator.data
    if fcst_data is None or len(fcst_data) < 2:
        return None

    entries = []
    for item in fcst_data:
        # Map the provider's numeric weather code onto a HA condition.
        condition = next(
            (cond for cond, codes in CONDITION_CLASSES.items()
             if item.weather_code in codes),
            None,
        )

        # Convert Wind Speed
        if item.wind_spd is None:
            wind_speed = None
        elif self._is_metric:
            wind_speed = round(float(item.wind_spd) * 3.6, 1)
        else:
            wind_speed = round(float(item.wind_spd * 2.23693629), 1)

        # Convert Precipitation
        if item.precip is None:
            precip = None
        elif self._is_metric:
            precip = round(item.precip, 1)
        else:
            precip = round(float(item.precip) / 25.4, 2)

        # Convert Snowfall
        if item.snow is None:
            snow = None
        elif self._is_metric:
            snow = round(item.snow, 1)
        else:
            snow = round(float(item.snow) / 25.4, 2)

        entries.append({
            ATTR_FORECAST_TIME: utc_from_timestamp(item.ts).isoformat(),
            ATTR_FORECAST_TEMP: item.max_temp,
            ATTR_FORECAST_TEMP_LOW: item.min_temp,
            ATTR_FORECAST_PRECIPITATION: precip,
            ATTR_WEATHERBIT_SNOW: snow,
            ATTR_WEATHERBIT_FCST_POP: item.pop,
            ATTR_FORECAST_CONDITION: condition,
            ATTR_FORECAST_WIND_SPEED: wind_speed,
            ATTR_FORECAST_WIND_BEARING: item.wind_dir,
        })

    return entries
def update(self): """Update sensor with new departures times.""" # Note: using Multi because there is a bug with the single stop impl results = self._client.get_predictions_for_multi_stops( [{ 'stop_tag': int(self.stop), 'route_tag': self.route, }], self.agency, ) self._log_debug('Predictions results: %s', results) if 'Error' in results: self._log_debug('Could not get predictions: %s', results) if not results.get('predictions'): self._log_debug('No predictions available') self._state = None # Remove attributes that may now be outdated self._attributes.pop('upcoming', None) return results = results['predictions'] # Set detailed attributes self._attributes.update({ 'agency': results.get('agencyTitle'), 'route': results.get('routeTitle'), 'stop': results.get('stopTitle'), }) # List all messages in the attributes messages = listify(results.get('message', [])) self._log_debug('Messages: %s', messages) self._attributes['message'] = ' -- '.join( (message.get('text', '') for message in messages)) # List out all directions in the attributes directions = listify(results.get('direction', [])) self._attributes['direction'] = ', '.join( (direction.get('title', '') for direction in directions)) # Chain all predictions together predictions = list( chain(*[ listify(direction.get('prediction', [])) for direction in directions ])) # Short circuit if we don't have any actual bus predictions if not predictions: self._log_debug('No upcoming predictions available') self._state = None self._attributes['upcoming'] = 'No upcoming predictions' return # Generate list of upcoming times self._attributes['upcoming'] = ', '.join(p['minutes'] for p in predictions) latest_prediction = maybe_first(predictions) self._state = utc_from_timestamp( int(latest_prediction['epochTime']) / 1000).isoformat()
def update(self):
    """Get the latest data from Dark Sky and updates the states.

    Dispatches on ``self.type`` (plus ``forecast_hour``/``forecast_day``)
    to pick which forecast slice feeds the sensor state; the ``daily_*``
    extreme sensors also store the matching timestamp in
    ``self._attribute``.
    """
    # Call the API for new forecast data. Each sensor will re-trigger this
    # same exact call, but that's fine. We cache results for a short period
    # of time to prevent hitting API limits. Note that Dark Sky will
    # charge users for too many calls in 1 day, so take care when updating.
    self.forecast_data.update()
    self.update_unit_of_measurement()

    if self.type == "minutely_summary":
        self.forecast_data.update_minutely()
        minutely = self.forecast_data.data_minutely
        self._state = getattr(minutely, "summary", "")
        self._icon = getattr(minutely, "icon", "")
    elif self.type == "hourly_summary":
        self.forecast_data.update_hourly()
        hourly = self.forecast_data.data_hourly
        self._state = getattr(hourly, "summary", "")
        self._icon = getattr(hourly, "icon", "")
    elif self.forecast_hour is not None:
        self.forecast_data.update_hourly()
        hourly = self.forecast_data.data_hourly
        if hasattr(hourly, "data"):
            self._state = self.get_state(hourly.data[self.forecast_hour])
        else:
            self._state = 0
    elif self.type == "daily_summary":
        self.forecast_data.update_daily()
        daily = self.forecast_data.data_daily
        self._state = getattr(daily, "summary", "")
        self._icon = getattr(daily, "icon", "")
    elif self.type == "daily_temperature_max":
        self.forecast_data.update()
        # Hottest day of the daily forecast; attribute holds when it peaks.
        data = self.forecast_data.data.json['daily']['data']
        result = max(data, key=lambda ev: ev['temperatureHigh'])
        self._state = result.get('temperatureHigh')
        # NOTE: local name `time` shadows the stdlib module name here.
        time = result.get('temperatureHighTime')
        self._attribute = utc_from_timestamp(time).isoformat()
    elif self.type == "daily_temperature_min":
        self.forecast_data.update()
        data = self.forecast_data.data.json['daily']['data']
        result = min(data, key=lambda ev: ev['temperatureLow'])
        self._state = result.get('temperatureLow')
        time = result.get('temperatureLowTime')
        self._attribute = utc_from_timestamp(time).isoformat()
    elif self.type == "daily_wind_gust_max":
        self.forecast_data.update()
        data = self.forecast_data.data.json['daily']['data']
        result = max(data, key=lambda ev: ev['windGust'])
        self._state = result.get('windGust')
        time = result.get('windGustTime')
        self._attribute = utc_from_timestamp(time).isoformat()
    elif self.type == "daily_percip_probability_max":
        self.forecast_data.update()
        data = self.forecast_data.data.json['daily']['data']
        result = max(data, key=lambda ev: ev['precipProbability'])
        self._state = result.get('precipProbability')
        # Uses precipIntensityMaxTime as the closest available timestamp
        # for the probability peak.
        time = result.get('precipIntensityMaxTime')
        self._attribute = utc_from_timestamp(time).isoformat()
    elif self.forecast_day is not None:
        self.forecast_data.update_daily()
        daily = self.forecast_data.data_daily
        if hasattr(daily, "data"):
            self._state = self.get_state(daily.data[self.forecast_day])
        else:
            self._state = 0
    else:
        # Default: current conditions.
        self.forecast_data.update_currently()
        currently = self.forecast_data.data_currently
        self._state = self.get_state(currently)
"platform": "yandex_transport", "stop_id": 9639579, "routes": ROUTES, "name": NAME, } } FILTERED_ATTRS = { "т36": ["21:43", "21:47", "22:02"], "т47": ["21:40", "22:01"], "м10": ["21:48", "22:00"], "stop_name": "7-й автобусный парк", "attribution": "Data provided by maps.yandex.ru", } RESULT_STATE = dt_util.utc_from_timestamp(1568659253).isoformat(timespec="seconds") async def assert_setup_sensor(hass, config, count=1): """Set up the sensor and assert it's been created.""" with assert_setup_component(count): assert await async_setup_component(hass, sensor.DOMAIN, config) async def test_setup_platform_valid_config(hass, mock_requester): """Test that sensor is set up properly with valid config.""" await assert_setup_sensor(hass, TEST_CONFIG) async def test_setup_platform_invalid_config(hass, mock_requester): """Check an invalid configuration."""
def update(self):
    """Get the latest system information.

    Dispatches on ``self.type`` and stores the reading in ``self._state``.
    Disk values are GiB, memory/swap values MiB; throughput sensors keep
    the previous counter sample in ``self._last_value`` /
    ``self._last_update_time`` to compute a rate.
    """
    if self.type == "disk_use_percent":
        self._state = psutil.disk_usage(self.argument).percent
    elif self.type == "disk_use":
        self._state = round(
            psutil.disk_usage(self.argument).used / 1024**3, 1)
    elif self.type == "disk_free":
        self._state = round(
            psutil.disk_usage(self.argument).free / 1024**3, 1)
    elif self.type == "memory_use_percent":
        self._state = psutil.virtual_memory().percent
    elif self.type == "memory_use":
        virtual_memory = psutil.virtual_memory()
        self._state = round(
            (virtual_memory.total - virtual_memory.available) / 1024**2, 1)
    elif self.type == "memory_free":
        self._state = round(psutil.virtual_memory().available / 1024**2, 1)
    elif self.type == "swap_use_percent":
        self._state = psutil.swap_memory().percent
    elif self.type == "swap_use":
        self._state = round(psutil.swap_memory().used / 1024**2, 1)
    elif self.type == "swap_free":
        self._state = round(psutil.swap_memory().free / 1024**2, 1)
    elif self.type == "processor_use":
        self._state = round(psutil.cpu_percent(interval=None))
    elif self.type == "processor_temperature":
        self._state = self.read_cpu_temperature()
    elif self.type == "process":
        # ON if any running process name matches the configured argument.
        for proc in psutil.process_iter():
            try:
                if self.argument == proc.name():
                    self._state = STATE_ON
                    return
            except psutil.NoSuchProcess as err:
                # Process exited between iteration and name() lookup.
                _LOGGER.warning(
                    "Failed to load process with id: %s, old name: %s",
                    err.pid,
                    err.name,
                )
        self._state = STATE_OFF
    elif self.type == "network_out" or self.type == "network_in":
        counters = psutil.net_io_counters(pernic=True)
        if self.argument in counters:
            counter = counters[self.argument][IO_COUNTER[self.type]]
            self._state = round(counter / 1024**2, 1)
        else:
            self._state = None
    elif self.type == "packets_out" or self.type == "packets_in":
        counters = psutil.net_io_counters(pernic=True)
        if self.argument in counters:
            self._state = counters[self.argument][IO_COUNTER[self.type]]
        else:
            self._state = None
    elif (self.type == "throughput_network_out"
          or self.type == "throughput_network_in"):
        counters = psutil.net_io_counters(pernic=True)
        if self.argument in counters:
            counter = counters[self.argument][IO_COUNTER[self.type]]
            now = dt_util.utcnow()
            # A rate needs a previous sample smaller than the current
            # counter (counters reset on reboot/interface reset).
            if self._last_value and self._last_value < counter:
                # BUGFIX: use total_seconds() — .seconds drops whole days
                # from the interval, producing wildly wrong rates after a
                # long gap between updates (the newer SensorData-based
                # implementation already does this).
                elapsed = (now - self._last_update_time).total_seconds()
                self._state = round(
                    (counter - self._last_value) / 1000**2 / elapsed,
                    3,
                )
            else:
                self._state = None
            self._last_update_time = now
            self._last_value = counter
        else:
            self._state = None
    elif self.type == "ipv4_address" or self.type == "ipv6_address":
        addresses = psutil.net_if_addrs()
        if self.argument in addresses:
            # No break: with several addresses of the family, the last
            # one wins (preserved from the original behavior).
            for addr in addresses[self.argument]:
                if addr.family == IF_ADDRS_FAMILY[self.type]:
                    self._state = addr.address
        else:
            self._state = None
    elif self.type == "last_boot":
        self._state = dt_util.as_local(
            dt_util.utc_from_timestamp(psutil.boot_time())).isoformat()
    elif self.type == "load_1m":
        self._state = round(os.getloadavg()[0], 2)
    elif self.type == "load_5m":
        self._state = round(os.getloadavg()[1], 2)
    elif self.type == "load_15m":
        self._state = round(os.getloadavg()[2], 2)
key="nextchange_temperature", name="Next Scheduled Temperature", native_unit_of_measurement=TEMP_CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, suitable=lambda device: device.has_thermostat and device. nextchange_temperature is not None, native_value=lambda device: device. nextchange_temperature, # type: ignore[no-any-return] ), FritzSensorEntityDescription( key="nextchange_time", name="Next Scheduled Change Time", device_class=SensorDeviceClass.TIMESTAMP, suitable=lambda device: device.has_thermostat and device. nextchange_endperiod is not None, native_value=lambda device: utc_from_timestamp(device. nextchange_endperiod), ), FritzSensorEntityDescription( key="nextchange_preset", name="Next Scheduled Preset", suitable=lambda device: device.has_thermostat and device. nextchange_temperature is not None, native_value=lambda device: PRESET_ECO if device.nextchange_temperature == device.eco_temperature else PRESET_COMFORT, ), FritzSensorEntityDescription( key="scheduled_preset", name="Current Scheduled Preset", suitable=lambda device: device.has_thermostat and device. nextchange_temperature is not None, native_value=lambda device: PRESET_COMFORT if device.