async def get(self, request, datetime=None): """Return history over a period of time.""" timer_start = time.perf_counter() if datetime: datetime = dt_util.parse_datetime(datetime) if datetime is None: return self.json_message('Invalid datetime', HTTP_BAD_REQUEST) now = dt_util.utcnow() one_day = timedelta(days=1) if datetime: start_time = dt_util.as_utc(datetime) else: start_time = now - one_day if start_time > now: return self.json([]) end_time = request.query.get('end_time') if end_time: end_time = dt_util.parse_datetime(end_time) if end_time: end_time = dt_util.as_utc(end_time) else: return self.json_message('Invalid end_time', HTTP_BAD_REQUEST) else: end_time = start_time + one_day entity_ids = request.query.get('filter_entity_id') if entity_ids: entity_ids = entity_ids.lower().split(',') include_start_time_state = 'skip_initial_state' not in request.query hass = request.app['hass'] result = await hass.async_add_job( get_significant_states, hass, start_time, end_time, entity_ids, self.filters, include_start_time_state) result = list(result.values()) if _LOGGER.isEnabledFor(logging.DEBUG): elapsed = time.perf_counter() - timer_start _LOGGER.debug( 'Extracted %d states in %fs', sum(map(len, result)), elapsed) # Optionally reorder the result to respect the ordering given # by any entities explicitly included in the configuration. if self.use_include_order: sorted_result = [] for order_entity in self.filters.included_entities: for state_list in result: if state_list[0].entity_id == order_entity: sorted_result.append(state_list) result.remove(state_list) break sorted_result.extend(result) result = sorted_result return await hass.async_add_job(self.json, result)
def get(self, request, date=None):
    """Retrieve logbook entries."""
    if date:
        start_day = dt_util.start_of_local_day(date)
    else:
        start_day = dt_util.start_of_local_day()
    end_day = start_day + timedelta(days=1)

    events = recorder.query_events(
        QUERY_EVENTS_BETWEEN,
        (dt_util.as_utc(start_day), dt_util.as_utc(end_day)))

    return self.json(humanify(events))
def update(self):
    """Get the latest data and update the states."""
    # Parse templates
    self.update_period()
    start, end = self._period

    # Convert to UTC
    start = dt_util.as_utc(start)
    end = dt_util.as_utc(end)

    # Get history between start and end
    history_list = history.state_changes_during_period(
        self.hass, start, end, str(self._entity_id))

    if self._entity_id not in history_list.keys():
        return

    # Get the first state
    last_state = history.get_state(self.hass, start, self._entity_id)
    last_state = (last_state is not None and
                  last_state == self._entity_state)
    last_time = dt_util.as_timestamp(start)
    elapsed = 0
    count = 0

    # Make calculations
    for item in history_list.get(self._entity_id):
        current_state = item.state == self._entity_state
        current_time = item.last_changed.timestamp()

        if last_state:
            elapsed += current_time - last_time
        if current_state and not last_state:
            count += 1

        last_state = current_state
        last_time = current_time

    # Count time elapsed between last history state and end of measure
    if last_state:
        measure_end = min(dt_util.as_timestamp(end),
                          dt_util.as_timestamp(datetime.datetime.now()))
        elapsed += measure_end - last_time

    # Save value in hours
    self.value = elapsed / 3600

    # Save counter
    self.count = count
async def get(self, request, datetime=None):
    """Retrieve logbook entries."""
    if datetime:
        datetime = dt_util.parse_datetime(datetime)

        if datetime is None:
            return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
    else:
        datetime = dt_util.start_of_local_day()

    period = request.query.get('period')
    if period is None:
        period = 1
    else:
        period = int(period)

    entity_id = request.query.get('entity')

    start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1)
    end_day = start_day + timedelta(days=period)
    hass = request.app['hass']

    def json_events():
        """Fetch events and generate JSON."""
        return self.json(list(
            _get_events(hass, self.config, start_day, end_day, entity_id)))

    return await hass.async_add_job(json_events)
def get(self, request, datetime=None):
    """Return history over a period of time."""
    if datetime:
        datetime = dt_util.parse_datetime(datetime)

        if datetime is None:
            return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)

    now = dt_util.utcnow()
    one_day = timedelta(days=1)

    if datetime:
        start_time = dt_util.as_utc(datetime)
    else:
        start_time = now - one_day

    if start_time > now:
        return self.json([])

    end_time = start_time + one_day
    entity_id = request.GET.get('filter_entity_id')

    result = yield from request.app['hass'].loop.run_in_executor(
        None, get_significant_states, start_time, end_time, entity_id,
        self.filters)

    return self.json(result.values())
def handle_new_task(call):
    """Call when a user creates a new Todoist Task from HASS."""
    project_name = call.data[PROJECT_NAME]
    project_id = project_id_lookup[project_name]

    # Create the task
    item = api.items.add(call.data[CONTENT], project_id)

    if LABELS in call.data:
        task_labels = call.data[LABELS]
        label_ids = [
            label_id_lookup[label.lower()]
            for label in task_labels]
        item.update(labels=label_ids)

    if PRIORITY in call.data:
        item.update(priority=call.data[PRIORITY])

    if DUE_DATE in call.data:
        due_date = dt.parse_datetime(call.data[DUE_DATE])
        if due_date is None:
            due = dt.parse_date(call.data[DUE_DATE])
            due_date = datetime(due.year, due.month, due.day)
        # Format it in the manner Todoist expects
        due_date = dt.as_utc(due_date)
        date_format = '%Y-%m-%dT%H:%M'
        due_date = datetime.strftime(due_date, date_format)
        item.update(due_date_utc=due_date)

    # Commit changes
    api.commit()
    _LOGGER.debug("Created Todoist task: %s", call.data[CONTENT])
def _update_info(self, now=None):
    """Update device info for every person sharing their location."""
    for person in self.service.get_all_people():
        try:
            dev_id = 'google_maps_{0}'.format(slugify(person.id))
        except TypeError:
            _LOGGER.warning("No location(s) shared with this account")
            return

        if self.max_gps_accuracy is not None and \
                person.accuracy > self.max_gps_accuracy:
            _LOGGER.info("Ignoring %s update because expected GPS "
                         "accuracy %s is not met: %s",
                         person.nickname, self.max_gps_accuracy,
                         person.accuracy)
            continue

        attrs = {
            ATTR_ADDRESS: person.address,
            ATTR_FULL_NAME: person.full_name,
            ATTR_ID: person.id,
            ATTR_LAST_SEEN: dt_util.as_utc(person.datetime),
            ATTR_NICKNAME: person.nickname,
            ATTR_BATTERY_CHARGING: person.charging,
            ATTR_BATTERY_LEVEL: person.battery_level,
        }
        self.see(
            dev_id=dev_id,
            gps=(person.latitude, person.longitude),
            picture=person.picture_url,
            source_type=SOURCE_TYPE_GPS,
            gps_accuracy=person.accuracy,
            attributes=attrs,
        )
async def async_update(self):
    """Get the latest data and update the states."""
    now = dt_util.utcnow()
    if self._tibber_home.current_price_total and self._last_updated and \
        dt_util.as_utc(dt_util.parse_datetime(self._last_updated)).hour \
            == now.hour:
        return

    def _find_current_price():
        for key, price_total in self._tibber_home.price_total.items():
            price_time = dt_util.as_utc(dt_util.parse_datetime(key))
            time_diff = (now - price_time).total_seconds() / 60
            if time_diff >= 0 and time_diff < 60:
                self._state = round(price_total, 2)
                self._last_updated = key
                return True
        return False

    if _find_current_price():
        return

    _LOGGER.debug("No cached data found, so asking for new data")
    await self._tibber_home.update_info()
    await self._tibber_home.update_price_info()
    data = self._tibber_home.info['viewer']['home']
    self._device_state_attributes['app_nickname'] = data['appNickname']
    self._device_state_attributes['grid_company'] = \
        data['meteringPointData']['gridCompany']
    self._device_state_attributes['estimated_annual_consumption'] = \
        data['meteringPointData']['estimatedAnnualConsumption']
    _find_current_price()
def _get_date(date):
    """Get the dateTime from date or dateTime as a local."""
    if 'date' in date:
        return dt.as_utc(dt.dt.datetime.combine(
            dt.parse_date(date['date']), dt.dt.time()))
    else:
        return dt.parse_datetime(date['dateTime'])
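# Hedged usage sketch for the helper above (not part of the original
# snippet): all-day calendar events only carry a 'date' key and are anchored
# to local midnight before the UTC conversion, while timed events carry a
# full 'dateTime' with an offset. The example values are made up.
all_day = _get_date({'date': '2020-06-12'})
timed = _get_date({'dateTime': '2020-06-12T08:00:00+02:00'})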
def _handle_get_logbook(handler, path_match, data):
    """ Return logbook entries. """
    start_today = dt_util.now().replace(hour=0, minute=0, second=0)

    handler.write_json(humanify(
        recorder.query_events(
            QUERY_EVENTS_AFTER, (dt_util.as_utc(start_today),))))
def test_notify_file(self, mock_utcnow):
    """Test the notify file output."""
    mock_utcnow.return_value = dt_util.as_utc(dt_util.now())

    with tempfile.TemporaryDirectory() as tempdirname:
        filename = os.path.join(tempdirname, 'notify.txt')
        message = 'one, two, testing, testing'
        self.assertTrue(setup_component(self.hass, notify.DOMAIN, {
            'notify': {
                'name': 'test',
                'platform': 'file',
                'filename': filename,
                'timestamp': False,
            }
        }))
        title = '{} notifications (Log started: {})\n{}\n'.format(
            ATTR_TITLE_DEFAULT, dt_util.utcnow().isoformat(), '-' * 80)

        self.hass.services.call('notify', 'test', {'message': message},
                                blocking=True)

        result = open(filename).read()
        self.assertEqual(result, "{}{}\n".format(title, message))
def get(self, request, datetime=None):
    """Retrieve logbook entries."""
    if datetime:
        datetime = dt_util.parse_datetime(datetime)

        if datetime is None:
            return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
    else:
        datetime = dt_util.start_of_local_day()

    start_day = dt_util.as_utc(datetime)
    end_day = start_day + timedelta(days=1)

    def get_results():
        """Query DB for results."""
        events = recorder.get_model('Events')
        query = recorder.query('Events').order_by(
            events.time_fired).filter(
                (events.time_fired > start_day) &
                (events.time_fired < end_day))
        events = recorder.execute(query)
        return _exclude_events(events, self.config)

    events = yield from request.app['hass'].loop.run_in_executor(
        None, get_results)

    return self.json(humanify(events))
def _process_timestamp(ts):
    """Process a timestamp into a datetime object."""
    if ts is None:
        return None
    if ts.tzinfo is None:
        return dt_util.UTC.localize(ts)
    return dt_util.as_utc(ts)
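# Illustrative sketch (not part of the snippet above): _process_timestamp
# treats naive datetimes as already being in UTC and simply attaches tzinfo,
# while aware datetimes are converted. A minimal standalone equivalent using
# only the standard library could look like this; the helper name is
# hypothetical.
from datetime import timezone


def process_timestamp_sketch(ts):
    """Normalize a DB timestamp to an aware UTC datetime (or None)."""
    if ts is None:
        return None
    if ts.tzinfo is None:
        # Naive values coming from the DB are assumed to already be UTC.
        return ts.replace(tzinfo=timezone.utc)
    return ts.astimezone(timezone.utc)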
def test_now(self):
    """Test the now method."""
    dt_util.set_default_time_zone(dt_util.get_time_zone(TEST_TIME_ZONE))

    self.assertAlmostEqual(
        dt_util.as_utc(dt_util.now()).replace(tzinfo=None),
        datetime.utcnow(),
        delta=timedelta(seconds=1))
def test_as_utc_with_local_object(self):
    """Test the UTC time with local object."""
    dt_util.set_default_time_zone(dt_util.get_time_zone(TEST_TIME_ZONE))
    localnow = dt_util.now()
    utcnow = dt_util.as_utc(localnow)

    assert localnow == utcnow
    assert localnow.tzinfo != utcnow.tzinfo
def test_as_utc_with_local_object(self):
    """Test the UTC time with local object."""
    dt_util.set_default_time_zone(dt_util.get_time_zone(TEST_TIME_ZONE))
    localnow = dt_util.now()
    utcnow = dt_util.as_utc(localnow)

    self.assertEqual(localnow, utcnow)
    self.assertNotEqual(localnow.tzinfo, utcnow.tzinfo)
def test_now(self):
    """Test the now method."""
    dt_util.set_default_time_zone(dt_util.get_time_zone(TEST_TIME_ZONE))

    assert abs(
        dt_util.as_utc(dt_util.now()).replace(tzinfo=None)
        - datetime.utcnow()
    ) < timedelta(seconds=1)
def _find_current_price():
    for key, price_total in self._tibber_home.price_total.items():
        price_time = dt_util.as_utc(dt_util.parse_datetime(key))
        time_diff = (now - price_time).total_seconds() / 60
        if time_diff >= 0 and time_diff < 60:
            self._state = round(price_total, 2)
            self._last_updated = key
            return True
    return False
def get(self, request, datetime=None): """Return history over a period of time.""" timer_start = time.perf_counter() if datetime: datetime = dt_util.parse_datetime(datetime) if datetime is None: return self.json_message('Invalid datetime', HTTP_BAD_REQUEST) now = dt_util.utcnow() one_day = timedelta(days=1) if datetime: start_time = dt_util.as_utc(datetime) else: start_time = now - one_day if start_time > now: return self.json([]) end_time = request.query.get('end_time') if end_time: end_time = dt_util.parse_datetime(end_time) if end_time: end_time = dt_util.as_utc(end_time) else: return self.json_message('Invalid end_time', HTTP_BAD_REQUEST) else: end_time = start_time + one_day entity_ids = request.query.get('filter_entity_id') if entity_ids: entity_ids = entity_ids.lower().split(',') include_start_time_state = 'skip_initial_state' not in request.query result = yield from request.app['hass'].async_add_job( get_significant_states, request.app['hass'], start_time, end_time, entity_ids, self.filters, include_start_time_state) result = result.values() if _LOGGER.isEnabledFor(logging.DEBUG): elapsed = time.perf_counter() - timer_start _LOGGER.debug( 'Extracted %d states in %fs', sum(map(len, result)), elapsed) return self.json(result)
def _handle_get_logbook(handler, path_match, data):
    """ Return logbook entries. """
    date_str = path_match.group("date")

    if date_str:
        start_date = dt_util.date_str_to_date(date_str)

        if start_date is None:
            handler.write_json_message("Error parsing JSON",
                                       HTTP_BAD_REQUEST)
            return

        start_day = dt_util.start_of_local_day(start_date)
    else:
        start_day = dt_util.start_of_local_day()

    end_day = start_day + timedelta(days=1)

    events = recorder.query_events(
        QUERY_EVENTS_BETWEEN,
        (dt_util.as_utc(start_day), dt_util.as_utc(end_day)))

    handler.write_json(humanify(events))
def get(self, request, datetime=None):
    """Retrieve logbook entries."""
    start_day = dt_util.as_utc(datetime or dt_util.start_of_local_day())
    end_day = start_day + timedelta(days=1)

    events = recorder.get_model('Events')
    query = recorder.query('Events').filter(
        (events.time_fired > start_day) &
        (events.time_fired < end_day))
    events = recorder.execute(query)

    return self.json(humanify(events))
def track_point_in_time(self, action, point_in_time):
    """ Add a listener that fires once after a specific point in time. """
    utc_point_in_time = date_util.as_utc(point_in_time)

    @ft.wraps(action)
    def utc_converter(utc_now):
        """ Convert the passed in UTC now to local now. """
        action(date_util.as_local(utc_now))

    self.track_point_in_utc_time(utc_converter, utc_point_in_time)
def get(self, request, datetime=None):
    """Return history over a period of time."""
    timer_start = time.perf_counter()
    if datetime:
        datetime = dt_util.parse_datetime(datetime)

        if datetime is None:
            return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)

    now = dt_util.utcnow()
    one_day = timedelta(days=1)

    if datetime:
        start_time = dt_util.as_utc(datetime)
    else:
        start_time = now - one_day

    if start_time > now:
        return self.json([])

    end_time = request.GET.get('end_time')
    if end_time:
        # Parse before converting so an invalid end_time is rejected
        # instead of raising when as_utc() is handed None.
        end_time = dt_util.parse_datetime(end_time)
        if end_time is None:
            return self.json_message('Invalid end_time', HTTP_BAD_REQUEST)
        end_time = dt_util.as_utc(end_time)
    else:
        end_time = start_time + one_day

    entity_id = request.GET.get('filter_entity_id')

    result = yield from request.app['hass'].loop.run_in_executor(
        None, get_significant_states, request.app['hass'], start_time,
        end_time, entity_id, self.filters)
    result = result.values()

    if _LOGGER.isEnabledFor(logging.DEBUG):
        elapsed = time.perf_counter() - timer_start
        _LOGGER.debug(
            'Extracted %d states in %fs', sum(map(len, result)), elapsed)

    return self.json(result)
def get(self, request, datetime=None):
    """Return history over a period of time."""
    one_day = timedelta(days=1)

    if datetime:
        start_time = dt_util.as_utc(datetime)
    else:
        start_time = dt_util.utcnow() - one_day

    end_time = start_time + one_day
    entity_id = request.args.get("filter_entity_id")

    return self.json(
        get_significant_states(start_time, end_time, entity_id).values())
def async_update(self):
    """Get the latest data and update the states."""
    if self._tibber_home.current_price_total and self._last_updated and \
        dt_util.as_utc(dt_util.parse_datetime(self._last_updated)).hour \
            == dt_util.utcnow().hour:
        return

    yield from self._tibber_home.update_current_price_info()

    self._state = self._tibber_home.current_price_total
    self._last_updated = self._tibber_home.current_price_info.get('startsAt')
    self._device_state_attributes = self._tibber_home.current_price_info
    self._unit_of_measurement = self._tibber_home.price_unit
def update(self):
    """Get the latest data and update the states."""
    # Parse templates
    self.update_period()
    start, end = self._period

    # Convert to UTC
    start = dt_util.as_utc(start)
    end = dt_util.as_utc(end)

    # Get history between start and end
    history_list = history.state_changes_during_period(
        start, end, str(self._entity_id))

    if self._entity_id not in history_list.keys():
        return

    # Get the first state
    last_state = history.get_state(start, self._entity_id)
    last_state = (last_state is not None and
                  last_state == self._entity_state)
    last_time = dt_util.as_timestamp(start)
    elapsed = 0

    # Make calculations
    for item in history_list.get(self._entity_id):
        current_state = item.state == self._entity_state
        current_time = item.last_changed.timestamp()

        if last_state:
            elapsed += current_time - last_time

        last_state = current_state
        last_time = current_time

    # Save value in hours
    self.value = elapsed / 3600
def _test_notify_file(self, timestamp, mock_utcnow, mock_stat): """Test the notify file output.""" mock_utcnow.return_value = dt_util.as_utc(dt_util.now()) mock_stat.return_value.st_size = 0 m_open = mock_open() with patch( 'homeassistant.components.notify.file.open', m_open, create=True ): filename = 'mock_file' message = 'one, two, testing, testing' with assert_setup_component(1) as handle_config: self.assertTrue(setup_component(self.hass, notify.DOMAIN, { 'notify': { 'name': 'test', 'platform': 'file', 'filename': filename, 'timestamp': timestamp, } })) assert handle_config[notify.DOMAIN] title = '{} notifications (Log started: {})\n{}\n'.format( ATTR_TITLE_DEFAULT, dt_util.utcnow().isoformat(), '-' * 80) self.hass.services.call('notify', 'test', {'message': message}, blocking=True) full_filename = os.path.join(self.hass.config.path(), filename) self.assertEqual(m_open.call_count, 1) self.assertEqual(m_open.call_args, call(full_filename, 'a')) self.assertEqual(m_open.return_value.write.call_count, 2) if not timestamp: self.assertEqual( m_open.return_value.write.call_args_list, [call(title), call('{}\n'.format(message))] ) else: self.assertEqual( m_open.return_value.write.call_args_list, [call(title), call('{} {}\n'.format( dt_util.utcnow().isoformat(), message))] )
def get(self, request, datetime=None):
    """Retrieve logbook entries."""
    if datetime:
        datetime = dt_util.parse_datetime(datetime)

        if datetime is None:
            return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
    else:
        datetime = dt_util.start_of_local_day()

    start_day = dt_util.as_utc(datetime)
    end_day = start_day + timedelta(days=1)
    hass = request.app['hass']

    events = yield from hass.async_add_job(
        _get_events, hass, self.config, start_day, end_day)
    return self.json(events)
def register(): """Post connection initialize.""" self.async_db_ready.set_result(True) def shutdown(event): """Shut down the Recorder.""" if not hass_started.done(): hass_started.set_result(shutdown_task) self.queue.put(None) self.join() self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown) if self.hass.state == CoreState.running: hass_started.set_result(None) else: @callback def notify_hass_started(event): """Notify that hass has started.""" hass_started.set_result(None) self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, notify_hass_started) if self.keep_days and self.purge_interval: @callback def async_purge(now): """Trigger the purge and schedule the next run.""" self.queue.put(PurgeTask(self.keep_days)) self.hass.helpers.event.async_track_point_in_time( async_purge, now + timedelta(days=self.purge_interval)) earliest = dt_util.utcnow() + timedelta(minutes=30) run = latest = dt_util.utcnow() + \ timedelta(days=self.purge_interval) with session_scope(session=self.get_session()) as session: event = session.query(Events).first() if event is not None: session.expunge(event) run = dt_util.as_utc(event.time_fired) + \ timedelta(days=self.keep_days+self.purge_interval) run = min(latest, max(run, earliest)) self.hass.helpers.event.async_track_point_in_time( async_purge, run)
def get(self, request, datetime=None):
    """Retrieve logbook entries."""
    if datetime:
        datetime = dt_util.parse_datetime(datetime)

        if datetime is None:
            return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
    else:
        datetime = dt_util.start_of_local_day()

    start_day = dt_util.as_utc(datetime)
    end_day = start_day + timedelta(days=1)
    hass = request.app['hass']

    events = yield from hass.loop.run_in_executor(
        None, _get_events, hass, start_day, end_day)
    events = _exclude_events(events, self.config)
    return self.json(humanify(events))
def _adapt_datetime(datetimestamp):
    """Turn a datetime into a UTC timestamp for the DB."""
    return dt_util.as_utc(datetimestamp).timestamp()
def update_attributes(self) -> None: """Update state attributes.""" # Add departure information if self._departure: self._attributes[ATTR_ARRIVAL] = dt_util.as_utc( self._departure["arrival_time"]).isoformat() self._attributes[ATTR_DAY] = self._departure["day"] if self._departure[ATTR_FIRST] is not None: self._attributes[ATTR_FIRST] = self._departure["first"] elif ATTR_FIRST in self._attributes: del self._attributes[ATTR_FIRST] if self._departure[ATTR_LAST] is not None: self._attributes[ATTR_LAST] = self._departure["last"] elif ATTR_LAST in self._attributes: del self._attributes[ATTR_LAST] else: if ATTR_ARRIVAL in self._attributes: del self._attributes[ATTR_ARRIVAL] if ATTR_DAY in self._attributes: del self._attributes[ATTR_DAY] if ATTR_FIRST in self._attributes: del self._attributes[ATTR_FIRST] if ATTR_LAST in self._attributes: del self._attributes[ATTR_LAST] # Add contextual information self._attributes[ATTR_OFFSET] = self._offset.seconds / 60 if self._state is None: self._attributes[ATTR_INFO] = ("No more departures" if self._include_tomorrow else "No more departures today") elif ATTR_INFO in self._attributes: del self._attributes[ATTR_INFO] if self._agency: self._attributes[ATTR_ATTRIBUTION] = self._agency.agency_name elif ATTR_ATTRIBUTION in self._attributes: del self._attributes[ATTR_ATTRIBUTION] # Add extra metadata key = "agency_id" if self._agency and key not in self._attributes: self.append_keys(self.dict_for_table(self._agency), "Agency") key = "origin_station_stop_id" if self._origin and key not in self._attributes: self.append_keys(self.dict_for_table(self._origin), "Origin Station") self._attributes[ATTR_LOCATION_ORIGIN] = LOCATION_TYPE_OPTIONS.get( self._origin.location_type, LOCATION_TYPE_DEFAULT) self._attributes[ ATTR_WHEELCHAIR_ORIGIN] = WHEELCHAIR_BOARDING_OPTIONS.get( self._origin.wheelchair_boarding, WHEELCHAIR_BOARDING_DEFAULT) key = "destination_station_stop_id" if self._destination and key not in self._attributes: self.append_keys(self.dict_for_table(self._destination), "Destination Station") self._attributes[ ATTR_LOCATION_DESTINATION] = LOCATION_TYPE_OPTIONS.get( self._destination.location_type, LOCATION_TYPE_DEFAULT) self._attributes[ ATTR_WHEELCHAIR_DESTINATION] = WHEELCHAIR_BOARDING_OPTIONS.get( self._destination.wheelchair_boarding, WHEELCHAIR_BOARDING_DEFAULT) # Manage Route metadata key = "route_id" if not self._route and key in self._attributes: self.remove_keys("Route") elif self._route and (key not in self._attributes or self._attributes[key] != self._route.route_id): self.append_keys(self.dict_for_table(self._route), "Route") self._attributes[ATTR_ROUTE_TYPE] = ROUTE_TYPE_OPTIONS[ self._route.route_type] # Manage Trip metadata key = "trip_id" if not self._trip and key in self._attributes: self.remove_keys("Trip") elif self._trip and (key not in self._attributes or self._attributes[key] != self._trip.trip_id): self.append_keys(self.dict_for_table(self._trip), "Trip") self._attributes[ATTR_BICYCLE] = BICYCLE_ALLOWED_OPTIONS.get( self._trip.bikes_allowed, BICYCLE_ALLOWED_DEFAULT) self._attributes[ATTR_WHEELCHAIR] = WHEELCHAIR_ACCESS_OPTIONS.get( self._trip.wheelchair_accessible, WHEELCHAIR_ACCESS_DEFAULT) # Manage Stop Times metadata prefix = "origin_stop" if self._departure: self.append_keys(self._departure["origin_stop_time"], prefix) self._attributes[ATTR_DROP_OFF_ORIGIN] = DROP_OFF_TYPE_OPTIONS.get( self._departure["origin_stop_time"]["Drop Off Type"], DROP_OFF_TYPE_DEFAULT, ) self._attributes[ATTR_PICKUP_ORIGIN] = PICKUP_TYPE_OPTIONS.get( 
self._departure["origin_stop_time"]["Pickup Type"], PICKUP_TYPE_DEFAULT) self._attributes[ATTR_TIMEPOINT_ORIGIN] = TIMEPOINT_OPTIONS.get( self._departure["origin_stop_time"]["Timepoint"], TIMEPOINT_DEFAULT) else: self.remove_keys(prefix) prefix = "destination_stop" if self._departure: self.append_keys(self._departure["destination_stop_time"], prefix) self._attributes[ ATTR_DROP_OFF_DESTINATION] = DROP_OFF_TYPE_OPTIONS.get( self._departure["destination_stop_time"]["Drop Off Type"], DROP_OFF_TYPE_DEFAULT, ) self._attributes[ ATTR_PICKUP_DESTINATION] = PICKUP_TYPE_OPTIONS.get( self._departure["destination_stop_time"]["Pickup Type"], PICKUP_TYPE_DEFAULT, ) self._attributes[ ATTR_TIMEPOINT_DESTINATION] = TIMEPOINT_OPTIONS.get( self._departure["destination_stop_time"]["Timepoint"], TIMEPOINT_DEFAULT) else: self.remove_keys(prefix)
async def test_if_fires_using_at_input_datetime(hass, calls, has_date, has_time): """Test for firing at input_datetime.""" await async_setup_component( hass, "input_datetime", {"input_datetime": {"trigger": {"has_date": has_date, "has_time": has_time}}}, ) now = dt_util.now() trigger_dt = now.replace( hour=5 if has_time else 0, minute=0, second=0, microsecond=0 ) + timedelta(2) await hass.services.async_call( "input_datetime", "set_datetime", { ATTR_ENTITY_ID: "input_datetime.trigger", "datetime": str(trigger_dt.replace(tzinfo=None)), }, blocking=True, ) time_that_will_not_match_right_away = trigger_dt - timedelta(minutes=1) some_data = "{{ trigger.platform }}-{{ trigger.now.day }}-{{ trigger.now.hour }}-{{trigger.entity_id}}" with patch( "homeassistant.util.dt.utcnow", return_value=dt_util.as_utc(time_that_will_not_match_right_away), ): assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: { "trigger": {"platform": "time", "at": "input_datetime.trigger"}, "action": { "service": "test.automation", "data_template": {"some": some_data}, }, } }, ) await hass.async_block_till_done() async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() assert len(calls) == 1 assert ( calls[0].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger" ) if has_date: trigger_dt += timedelta(days=1) if has_time: trigger_dt += timedelta(hours=1) await hass.services.async_call( "input_datetime", "set_datetime", { ATTR_ENTITY_ID: "input_datetime.trigger", "datetime": str(trigger_dt.replace(tzinfo=None)), }, blocking=True, ) async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() assert len(calls) == 2 assert ( calls[1].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger" )
def test_as_utc_with_naive_object(self):
    """Test the UTC time with a naive object."""
    utcnow = datetime.utcnow()

    self.assertEqual(utcnow, dt_util.as_utc(utcnow).replace(tzinfo=None))
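# A minimal sketch of the behaviour these tests rely on: as_utc() keeps
# aware datetimes at the same instant (only the tzinfo changes) and assumes
# naive datetimes are expressed in the configured default time zone. The
# helpers below are from homeassistant.util.dt; the example values are
# illustrative only.
import homeassistant.util.dt as dt_util

dt_util.set_default_time_zone(dt_util.get_time_zone("America/New_York"))

aware_local = dt_util.now()                         # aware, local time zone
assert dt_util.as_utc(aware_local) == aware_local   # same instant in UTC

naive = datetime(2020, 6, 12, 8, 0, 0)              # no tzinfo
utc_equivalent = dt_util.as_utc(naive)              # interpreted as local time
assert utc_equivalent.tzinfo is not None

dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))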
def update(self): ftype = None units = None last_update = None value = None ffvalue = None renvalue = None genvalue = None if not self.should_update(): _LOGGER.debug("OpenNEM was last updated %s minutes ago, skipping update", (dt_util.utcnow() - self.last_updated)) return try: result = requests.get(self._build_url(), timeout=10).json() ###OpenNEM Data for row in result: if row['type'] == "power": ftype = (row['fuel_tech']) else: ftype = (row['type']) units = row['units'] last_update = row['history']['last'] if row['type'] == "temperature": value = row['history']['data'][-2] else: value = row['history']['data'][-1] # DATA[ftype][1]=config.get(CONF_REGION) DATA[ftype][2]=units DATA[ftype][3]=last_update if value: DATA[ftype][4]=round(value,2) else: DATA[ftype][4]=0 ffvalue = DATA['black_coal'][4] + DATA["distillate"][4] + DATA["brown_coal"][4] + DATA["gas_ccgt"][4] + DATA["gas_ocgt"][4] + DATA["gas_recip"][4] + DATA["gas_steam"][4] if ffvalue: DATA['fossilfuel'][4] = round(ffvalue,2) else: DATA['fossilfuel'][4] = 0 renvalue = DATA['biomass'][4]+DATA["hydro"][4]+DATA["solar"][4]+DATA["wind"][4]+DATA["rooftop_solar"][4] if renvalue: DATA['renewables'][4] = round(renvalue,2) else: DATA['renewables'][4] = 0 genvalue = DATA['fossilfuel'][4]+DATA['renewables'][4] if genvalue: DATA['generation'][4] = round(genvalue,2) else: DATA['generation'][4] = 0 self._data = DATA self.last_updated = dt_util.as_utc(datetime.datetime.strptime(str(self._data['demand'][3]), "%Y-%m-%dT%H:%M+1000")) _LOGGER.debug("OpenNEM: Last Updated %s", self.last_updated) return except ValueError as err: _LOGGER.error("Check OpenNEM %s", err.args) self._data = None raise
def _adapt_datetime(datetimestamp):
    """Turn a datetime into a UTC timestamp for the DB."""
    return dt_util.as_utc(datetimestamp.replace(microsecond=0)).timestamp()
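# Hedged usage sketch (not part of the original snippet): the adapter above
# stores datetimes as POSIX timestamps, so reading a value back is just the
# inverse conversion. utc_from_timestamp() is the homeassistant.util.dt
# helper for that; the sample value is illustrative.
import homeassistant.util.dt as dt_util

stored = _adapt_datetime(dt_util.utcnow())      # float seconds since epoch
restored = dt_util.utc_from_timestamp(stored)   # aware UTC datetime again
assert restored.microsecond == 0                # microseconds were dropped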
def test_sun_offset(self): """Test sun event with offset.""" test_time = self.hass.config.time_zone.localize(datetime( 2019, 1, 12)).astimezone(pytz.UTC) sunrise = dt_util.as_local( get_astral_event_date(self.hass, "sunrise", dt_util.as_utc(test_time)) + timedelta(hours=-1, minutes=-30)) sunset = dt_util.as_local( get_astral_event_date(self.hass, "sunset", dt_util.as_utc( test_time)) + timedelta(hours=1, minutes=30)) config = { "binary_sensor": [{ "platform": "tod", "name": "Day", "after": "sunrise", "after_offset": "-1:30", "before": "sunset", "before_offset": "1:30", }] } entity_id = "binary_sensor.day" testtime = sunrise + timedelta(seconds=-1) with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): setup_component(self.hass, "binary_sensor", config) self.hass.block_till_done() self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_OFF testtime = sunrise with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ON testtime = sunrise + timedelta(seconds=1) with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ON self.hass.block_till_done() testtime = sunset + timedelta(seconds=-1) with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ON self.hass.block_till_done() testtime = sunset with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_OFF self.hass.block_till_done() testtime = sunset + timedelta(seconds=1) with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_OFF test_time = test_time + timedelta(days=1) sunrise = dt_util.as_local( get_astral_event_date(self.hass, "sunrise", dt_util.as_utc(test_time)) + timedelta(hours=-1, minutes=-30)) testtime = sunrise with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ON
def static_datetime():
    """Build a datetime object for testing in the correct timezone."""
    return dt.as_utc(datetime(2020, 6, 12, 8, 0, 0))
def _update_info(self, entity_id, old_state, new_state, init=False): if new_state is None: return with self._lock: # Get time device was last seen, which is the entity's last_seen # attribute, or if that doesn't exist, then last_updated from the # new state object. Make sure last_seen is timezone aware in UTC. # Note that dt_util.as_utc assumes naive datetime is in local # timezone. last_seen = new_state.attributes.get(ATTR_LAST_SEEN) if isinstance(last_seen, datetime): last_seen = dt_util.as_utc(last_seen) else: try: last_seen = dt_util.utc_from_timestamp(float(last_seen)) except (TypeError, ValueError): last_seen = new_state.last_updated # Is this newer info than last update? if self._prev_seen and last_seen <= self._prev_seen: _LOGGER.debug( 'For {} skipping update from {}: ' 'last_seen not newer than previous update ({} <= {})'. format(self._entity_id, entity_id, last_seen, self._prev_seen)) return # Try to get GPS and battery data. try: gps = (new_state.attributes[ATTR_LATITUDE], new_state.attributes[ATTR_LONGITUDE]) except KeyError: gps = None gps_accuracy = new_state.attributes.get(ATTR_GPS_ACCURACY) battery = new_state.attributes.get( ATTR_BATTERY, new_state.attributes.get(ATTR_BATTERY_LEVEL)) charging = new_state.attributes.get( ATTR_BATTERY_CHARGING, new_state.attributes.get(ATTR_CHARGING)) # Don't use location_name unless we have to. location_name = None # What type of tracker is this? if new_state.domain == BS_DOMAIN: source_type = SOURCE_TYPE_BINARY_SENSOR else: source_type = new_state.attributes.get(ATTR_SOURCE_TYPE) state = new_state.state if source_type == SOURCE_TYPE_GPS: # GPS coordinates and accuracy are required. if gps is None: self._bad_entity(entity_id, 'missing gps attributes', init) return if gps_accuracy is None: self._bad_entity(entity_id, 'missing gps_accuracy attribute', init) return self._good_entity(entity_id, SOURCE_TYPE_GPS, state) elif source_type in SOURCE_TYPE_NON_GPS: # Convert 'on'/'off' state of binary_sensor # to 'home'/'not_home'. if source_type == SOURCE_TYPE_BINARY_SENSOR: if state == STATE_BINARY_SENSOR_HOME: state = STATE_HOME else: state = STATE_NOT_HOME self._good_entity(entity_id, source_type, state) if not self._use_non_gps_data(state): return # Don't use new GPS data if it's not complete. if gps is None or gps_accuracy is None: gps = gps_accuracy = None # Get current GPS data, if any, and determine if it is in # 'zone.home'. cur_state = self._hass.states.get(self._entity_id) try: cur_lat = cur_state.attributes[ATTR_LATITUDE] cur_lon = cur_state.attributes[ATTR_LONGITUDE] cur_acc = cur_state.attributes[ATTR_GPS_ACCURACY] cur_gps_is_home = (active_zone( self._hass, cur_lat, cur_lon, cur_acc).entity_id == ENTITY_ID_HOME) except (AttributeError, KeyError): cur_gps_is_home = False # It's important, for this composite tracker, to avoid the # component level code's "stale processing." This can be done # one of two ways: 1) provide GPS data w/ source_type of gps, # or 2) provide a location_name (that will be used as the new # state.) # If router entity's state is 'home' and current GPS data from # composite entity is available and is in 'zone.home', # use it and make source_type gps. if state == STATE_HOME and cur_gps_is_home: gps = cur_lat, cur_lon gps_accuracy = cur_acc source_type = SOURCE_TYPE_GPS # Otherwise, if new GPS data is valid (which is unlikely if # new state is not 'home'), # use it and make source_type gps. elif gps: source_type = SOURCE_TYPE_GPS # Otherwise, don't use any GPS data, but set location_name to # new state. 
else: location_name = state else: self._bad_entity( entity_id, 'unsupported source_type: {}'.format(source_type), init) return tz = None if self._time_as in [TZ_DEVICE_UTC, TZ_DEVICE_LOCAL]: tzname = None if gps: # timezone_at will return a string or None. tzname = self._tf.timezone_at(lng=gps[1], lat=gps[0]) # get_time_zone will return a tzinfo or None. tz = dt_util.get_time_zone(tzname) attrs = {ATTR_TIME_ZONE: tzname or STATE_UNKNOWN} else: attrs = {} attrs.update({ ATTR_ENTITY_ID: tuple(entity_id for entity_id, entity in self._entities.items() if entity[ATTR_SOURCE_TYPE] is not None), ATTR_LAST_ENTITY_ID: entity_id, ATTR_LAST_SEEN: self._dt_attr_from_utc(last_seen.replace(microsecond=0), tz) }) if charging is not None: attrs[ATTR_BATTERY_CHARGING] = charging self._see(dev_id=self._dev_id, location_name=location_name, gps=gps, gps_accuracy=gps_accuracy, battery=battery, attributes=attrs, source_type=source_type) self._prev_seen = last_seen
async def get(self, request: web.Request, datetime: Optional[str] = None) -> web.Response: """Return history over a period of time.""" datetime_ = None if datetime: datetime_ = dt_util.parse_datetime(datetime) if datetime_ is None: return self.json_message("Invalid datetime", HTTP_BAD_REQUEST) now = dt_util.utcnow() one_day = timedelta(days=1) if datetime_: start_time = dt_util.as_utc(datetime_) else: start_time = now - one_day if start_time > now: return self.json([]) end_time_str = request.query.get("end_time") if end_time_str: end_time = dt_util.parse_datetime(end_time_str) if end_time: end_time = dt_util.as_utc(end_time) else: return self.json_message("Invalid end_time", HTTP_BAD_REQUEST) else: end_time = start_time + one_day entity_ids_str = request.query.get("filter_entity_id") entity_ids = None if entity_ids_str: entity_ids = entity_ids_str.lower().split(",") include_start_time_state = "skip_initial_state" not in request.query significant_changes_only = (request.query.get( "significant_changes_only", "1") != "0") minimal_response = "minimal_response" in request.query hass = request.app["hass"] if (not include_start_time_state and entity_ids and not _entities_may_have_state_changes_after( hass, entity_ids, start_time)): return self.json([]) return cast( web.Response, await hass.async_add_executor_job( self._sorted_significant_states_json, hass, start_time, end_time, entity_ids, include_start_time_state, significant_changes_only, minimal_response, ), )
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR, value=lambda usage: usage.electricity[-1].consumption, ), OVOEnergySensorEntityDescription( key=KEY_LAST_ELECTRICITY_COST, name="OVO Last Electricity Cost", device_class=SensorDeviceClass.MONETARY, state_class=SensorStateClass.TOTAL_INCREASING, value=lambda usage: usage.electricity[-1].cost.amount, ), OVOEnergySensorEntityDescription( key="last_electricity_start_time", name="OVO Last Electricity Start Time", entity_registry_enabled_default=False, device_class=SensorDeviceClass.TIMESTAMP, value=lambda usage: dt_util.as_utc(usage.electricity[-1].interval.start ), ), OVOEnergySensorEntityDescription( key="last_electricity_end_time", name="OVO Last Electricity End Time", entity_registry_enabled_default=False, device_class=SensorDeviceClass.TIMESTAMP, value=lambda usage: dt_util.as_utc(usage.electricity[-1].interval.end), ), ) SENSOR_TYPES_GAS: tuple[OVOEnergySensorEntityDescription, ...] = ( OVOEnergySensorEntityDescription( key="last_gas_reading", name="OVO Last Gas Reading", device_class=SensorDeviceClass.ENERGY,
def _get_utc_value(entity: RenaultSensor[T]) -> str:
    """Return the UTC value of this entity."""
    original_dt = parse_datetime(cast(str, entity.data))
    if TYPE_CHECKING:
        assert original_dt is not None
    return as_utc(original_dt).isoformat()
def test_monthly_statistics(hass_recorder, caplog, timezone): """Test inserting external statistics.""" dt_util.set_default_time_zone(dt_util.get_time_zone(timezone)) hass = hass_recorder() wait_recording_done(hass) assert "Compiling statistics for" not in caplog.text assert "Statistics already compiled" not in caplog.text zero = dt_util.utcnow() period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00")) period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00")) period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00")) period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00")) external_statistics = ( { "start": period1, "last_reset": None, "state": 0, "sum": 2, }, { "start": period2, "last_reset": None, "state": 1, "sum": 3, }, { "start": period3, "last_reset": None, "state": 2, "sum": 4, }, { "start": period4, "last_reset": None, "state": 3, "sum": 5, }, ) external_metadata = { "has_mean": False, "has_sum": True, "name": "Total imported energy", "source": "test", "statistic_id": "test:total_energy_import", "unit_of_measurement": "kWh", } async_add_external_statistics(hass, external_metadata, external_statistics) wait_recording_done(hass) stats = statistics_during_period(hass, zero, period="month") sep_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00")) sep_end = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00")) oct_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00")) oct_end = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00")) assert stats == { "test:total_energy_import": [ { "statistic_id": "test:total_energy_import", "start": sep_start.isoformat(), "end": sep_end.isoformat(), "max": None, "mean": None, "min": None, "last_reset": None, "state": approx(1.0), "sum": approx(3.0), }, { "statistic_id": "test:total_energy_import", "start": oct_start.isoformat(), "end": oct_end.isoformat(), "max": None, "mean": None, "min": None, "last_reset": None, "state": approx(3.0), "sum": approx(5.0), }, ] } dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))
def async_fire_time_changed(hass, time):
    """Fire a time changed event."""
    hass.bus.async_fire(EVENT_TIME_CHANGED, {"now": date_util.as_utc(time)})
def update(self): """Update probe data.""" self.api.login(self.username, self.password) _LOGGER.debug("Updating data for %s (%s)", self.device_id, self.growatt_type) try: if self.growatt_type == "total": total_info = self.api.plant_info(self.device_id) del total_info["deviceList"] # PlantMoneyText comes in as "3.1/€" split between value and currency plant_money_text, currency = total_info["plantMoneyText"].split("/") total_info["plantMoneyText"] = plant_money_text total_info["currency"] = currency self.data = total_info elif self.growatt_type == "inverter": inverter_info = self.api.inverter_detail(self.device_id) self.data = inverter_info elif self.growatt_type == "tlx": tlx_info = self.api.tlx_detail(self.device_id) self.data = tlx_info["data"] elif self.growatt_type == "storage": storage_info_detail = self.api.storage_params(self.device_id)[ "storageDetailBean" ] storage_energy_overview = self.api.storage_energy_overview( self.plant_id, self.device_id ) self.data = {**storage_info_detail, **storage_energy_overview} elif self.growatt_type == "mix": mix_info = self.api.mix_info(self.device_id) mix_totals = self.api.mix_totals(self.device_id, self.plant_id) mix_system_status = self.api.mix_system_status( self.device_id, self.plant_id ) mix_detail = self.api.mix_detail(self.device_id, self.plant_id) # Get the chart data and work out the time of the last entry, use this as the last time data was published to the Growatt Server mix_chart_entries = mix_detail["chartData"] sorted_keys = sorted(mix_chart_entries) # Create datetime from the latest entry date_now = dt.now().date() last_updated_time = dt.parse_time(str(sorted_keys[-1])) combined_timestamp = datetime.datetime.combine( date_now, last_updated_time ) # Convert datetime to UTC combined_timestamp_utc = dt.as_utc(combined_timestamp) mix_detail["lastdataupdate"] = combined_timestamp_utc.isoformat() # Dashboard data is largely inaccurate for mix system but it is the only call with the ability to return the combined # imported from grid value that is the combination of charging AND load consumption dashboard_data = self.api.dashboard_data(self.plant_id) # Dashboard values have units e.g. "kWh" as part of their returned string, so we remove it dashboard_values_for_mix = { # etouser is already used by the results from 'mix_detail' so we rebrand it as 'etouser_combined' "etouser_combined": float( dashboard_data["etouser"].replace("kWh", "") ) } self.data = { **mix_info, **mix_totals, **mix_system_status, **mix_detail, **dashboard_values_for_mix, } except json.decoder.JSONDecodeError: _LOGGER.error("Unable to fetch data from Growatt server")
def _convert_weather_response(self, weather_response): """Format the weather response correctly.""" if not weather_response or not weather_response.hourly: return None elaborated = dt_util.parse_datetime( weather_response.hourly[ATTR_DATA][0][AEMET_ATTR_ELABORATED] + "Z") now = dt_util.now() now_utc = dt_util.utcnow() hour = now.hour # Get current day day = None for cur_day in weather_response.hourly[ATTR_DATA][0][ AEMET_ATTR_FORECAST][AEMET_ATTR_DAY]: cur_day_date = dt_util.parse_datetime(cur_day[AEMET_ATTR_DATE]) if now.date() == cur_day_date.date(): day = cur_day break # Get latest station data station_data = None station_dt = None if weather_response.station: for _station_data in weather_response.station[ATTR_DATA]: if AEMET_ATTR_STATION_DATE in _station_data: _station_dt = dt_util.parse_datetime( _station_data[AEMET_ATTR_STATION_DATE] + "Z") if not station_dt or _station_dt > station_dt: station_data = _station_data station_dt = _station_dt condition = None humidity = None pressure = None rain = None rain_prob = None snow = None snow_prob = None station_id = None station_name = None station_timestamp = None storm_prob = None temperature = None temperature_feeling = None town_id = None town_name = None town_timestamp = dt_util.as_utc(elaborated).isoformat() wind_bearing = None wind_max_speed = None wind_speed = None # Get weather values if day: condition = self._get_condition(day, hour) humidity = self._get_humidity(day, hour) rain = self._get_rain(day, hour) rain_prob = self._get_rain_prob(day, hour) snow = self._get_snow(day, hour) snow_prob = self._get_snow_prob(day, hour) station_id = self._get_station_id() station_name = self._get_station_name() storm_prob = self._get_storm_prob(day, hour) temperature = self._get_temperature(day, hour) temperature_feeling = self._get_temperature_feeling(day, hour) town_id = self._get_town_id() town_name = self._get_town_name() wind_bearing = self._get_wind_bearing(day, hour) wind_max_speed = self._get_wind_max_speed(day, hour) wind_speed = self._get_wind_speed(day, hour) # Overwrite weather values with closest station data (if present) if station_data: station_timestamp = dt_util.as_utc(station_dt).isoformat() if (now_utc - station_dt) <= STATION_MAX_DELTA: if AEMET_ATTR_STATION_HUMIDITY in station_data: humidity = format_float( station_data[AEMET_ATTR_STATION_HUMIDITY]) if AEMET_ATTR_STATION_PRESSURE_SEA in station_data: pressure = format_float( station_data[AEMET_ATTR_STATION_PRESSURE_SEA]) if AEMET_ATTR_STATION_TEMPERATURE in station_data: temperature = format_float( station_data[AEMET_ATTR_STATION_TEMPERATURE]) else: _LOGGER.warning("Station data is outdated") # Get forecast from weather data forecast_daily = self._get_daily_forecast_from_weather_response( weather_response, now) forecast_hourly = self._get_hourly_forecast_from_weather_response( weather_response, now) return { ATTR_API_CONDITION: condition, ATTR_API_FORECAST_DAILY: forecast_daily, ATTR_API_FORECAST_HOURLY: forecast_hourly, ATTR_API_HUMIDITY: humidity, ATTR_API_TEMPERATURE: temperature, ATTR_API_TEMPERATURE_FEELING: temperature_feeling, ATTR_API_PRESSURE: pressure, ATTR_API_RAIN: rain, ATTR_API_RAIN_PROB: rain_prob, ATTR_API_SNOW: snow, ATTR_API_SNOW_PROB: snow_prob, ATTR_API_STATION_ID: station_id, ATTR_API_STATION_NAME: station_name, ATTR_API_STATION_TIMESTAMP: station_timestamp, ATTR_API_STORM_PROB: storm_prob, ATTR_API_TOWN_ID: town_id, ATTR_API_TOWN_NAME: town_name, ATTR_API_TOWN_TIMESTAMP: town_timestamp, ATTR_API_WIND_BEARING: wind_bearing, ATTR_API_WIND_MAX_SPEED: 
wind_max_speed, ATTR_API_WIND_SPEED: wind_speed, }
def test_norwegian_case_summer(self): """Test location in Norway where the sun doesn't set in summer.""" self.hass.config.latitude = 69.6 self.hass.config.longitude = 18.8 test_time = self.hass.config.time_zone.localize(datetime( 2010, 6, 1)).astimezone(pytz.UTC) sunrise = dt_util.as_local( get_astral_event_next(self.hass, "sunrise", dt_util.as_utc(test_time))) sunset = dt_util.as_local( get_astral_event_next(self.hass, "sunset", dt_util.as_utc(test_time))) config = { "binary_sensor": [{ "platform": "tod", "name": "Day", "after": "sunrise", "before": "sunset", }] } entity_id = "binary_sensor.day" testtime = test_time with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): setup_component(self.hass, "binary_sensor", config) self.hass.block_till_done() self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_OFF testtime = sunrise + timedelta(seconds=-1) with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_OFF testtime = sunrise with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ON testtime = sunrise + timedelta(seconds=1) with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ON self.hass.block_till_done() testtime = sunset + timedelta(seconds=-1) with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ON self.hass.block_till_done() testtime = sunset with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_OFF self.hass.block_till_done() testtime = sunset + timedelta(seconds=1) with patch( "homeassistant.components.tod.binary_sensor.dt_util.utcnow", return_value=testtime, ): self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: testtime}) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_OFF
class WeatherUpdateCoordinator(DataUpdateCoordinator): """Weather data update coordinator.""" def __init__(self, hass, aemet, latitude, longitude, station_updates): """Initialize coordinator.""" super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=WEATHER_UPDATE_INTERVAL) self._aemet = aemet self._station = None self._town = None self._latitude = latitude self._longitude = longitude self._station_updates = station_updates self._data = { "daily": None, "hourly": None, "station": None, } async def _async_update_data(self): data = {} with async_timeout.timeout(120): weather_response = await self._get_aemet_weather() data = self._convert_weather_response(weather_response) return data async def _get_aemet_weather(self): """Poll weather data from AEMET OpenData.""" weather = await self.hass.async_add_executor_job( self._get_weather_and_forecast) return weather def _get_weather_station(self): if not self._station: self._station = ( self._aemet. get_conventional_observation_station_by_coordinates( self._latitude, self._longitude)) if self._station: _LOGGER.debug( "station found for coordinates [%s, %s]: %s", self._latitude, self._longitude, self._station, ) if not self._station: _LOGGER.debug( "station not found for coordinates [%s, %s]", self._latitude, self._longitude, ) return self._station def _get_weather_town(self): if not self._town: self._town = self._aemet.get_town_by_coordinates( self._latitude, self._longitude) if self._town: _LOGGER.debug( "Town found for coordinates [%s, %s]: %s", self._latitude, self._longitude, self._town, ) if not self._town: _LOGGER.error( "Town not found for coordinates [%s, %s]", self._latitude, self._longitude, ) raise TownNotFound return self._town def _get_weather_and_forecast(self): """Get weather and forecast data from AEMET OpenData.""" self._get_weather_town() daily = self._aemet.get_specific_forecast_town_daily( self._town[AEMET_ATTR_ID]) if not daily: _LOGGER.error('Error fetching daily data for town "%s"', self._town[AEMET_ATTR_ID]) hourly = self._aemet.get_specific_forecast_town_hourly( self._town[AEMET_ATTR_ID]) if not hourly: _LOGGER.error('Error fetching hourly data for town "%s"', self._town[AEMET_ATTR_ID]) station = None if self._station_updates and self._get_weather_station(): station = self._aemet.get_conventional_observation_station_data( self._station[AEMET_ATTR_IDEMA]) if not station: _LOGGER.error( 'Error fetching data for station "%s"', self._station[AEMET_ATTR_IDEMA], ) if daily: self._data["daily"] = daily if hourly: self._data["hourly"] = hourly if station: self._data["station"] = station return AemetWeather( self._data["daily"], self._data["hourly"], self._data["station"], ) def _convert_weather_response(self, weather_response): """Format the weather response correctly.""" if not weather_response or not weather_response.hourly: return None elaborated = dt_util.parse_datetime( weather_response.hourly[ATTR_DATA][0][AEMET_ATTR_ELABORATED] + "Z") now = dt_util.now() now_utc = dt_util.utcnow() hour = now.hour # Get current day day = None for cur_day in weather_response.hourly[ATTR_DATA][0][ AEMET_ATTR_FORECAST][AEMET_ATTR_DAY]: cur_day_date = dt_util.parse_datetime(cur_day[AEMET_ATTR_DATE]) if now.date() == cur_day_date.date(): day = cur_day break # Get latest station data station_data = None station_dt = None if weather_response.station: for _station_data in weather_response.station[ATTR_DATA]: if AEMET_ATTR_STATION_DATE in _station_data: _station_dt = dt_util.parse_datetime( _station_data[AEMET_ATTR_STATION_DATE] + "Z") if not 
station_dt or _station_dt > station_dt: station_data = _station_data station_dt = _station_dt condition = None humidity = None pressure = None rain = None rain_prob = None snow = None snow_prob = None station_id = None station_name = None station_timestamp = None storm_prob = None temperature = None temperature_feeling = None town_id = None town_name = None town_timestamp = dt_util.as_utc(elaborated).isoformat() wind_bearing = None wind_max_speed = None wind_speed = None # Get weather values if day: condition = self._get_condition(day, hour) humidity = self._get_humidity(day, hour) rain = self._get_rain(day, hour) rain_prob = self._get_rain_prob(day, hour) snow = self._get_snow(day, hour) snow_prob = self._get_snow_prob(day, hour) station_id = self._get_station_id() station_name = self._get_station_name() storm_prob = self._get_storm_prob(day, hour) temperature = self._get_temperature(day, hour) temperature_feeling = self._get_temperature_feeling(day, hour) town_id = self._get_town_id() town_name = self._get_town_name() wind_bearing = self._get_wind_bearing(day, hour) wind_max_speed = self._get_wind_max_speed(day, hour) wind_speed = self._get_wind_speed(day, hour) # Overwrite weather values with closest station data (if present) if station_data: station_timestamp = dt_util.as_utc(station_dt).isoformat() if (now_utc - station_dt) <= STATION_MAX_DELTA: if AEMET_ATTR_STATION_HUMIDITY in station_data: humidity = format_float( station_data[AEMET_ATTR_STATION_HUMIDITY]) if AEMET_ATTR_STATION_PRESSURE_SEA in station_data: pressure = format_float( station_data[AEMET_ATTR_STATION_PRESSURE_SEA]) if AEMET_ATTR_STATION_TEMPERATURE in station_data: temperature = format_float( station_data[AEMET_ATTR_STATION_TEMPERATURE]) else: _LOGGER.warning("Station data is outdated") # Get forecast from weather data forecast_daily = self._get_daily_forecast_from_weather_response( weather_response, now) forecast_hourly = self._get_hourly_forecast_from_weather_response( weather_response, now) return { ATTR_API_CONDITION: condition, ATTR_API_FORECAST_DAILY: forecast_daily, ATTR_API_FORECAST_HOURLY: forecast_hourly, ATTR_API_HUMIDITY: humidity, ATTR_API_TEMPERATURE: temperature, ATTR_API_TEMPERATURE_FEELING: temperature_feeling, ATTR_API_PRESSURE: pressure, ATTR_API_RAIN: rain, ATTR_API_RAIN_PROB: rain_prob, ATTR_API_SNOW: snow, ATTR_API_SNOW_PROB: snow_prob, ATTR_API_STATION_ID: station_id, ATTR_API_STATION_NAME: station_name, ATTR_API_STATION_TIMESTAMP: station_timestamp, ATTR_API_STORM_PROB: storm_prob, ATTR_API_TOWN_ID: town_id, ATTR_API_TOWN_NAME: town_name, ATTR_API_TOWN_TIMESTAMP: town_timestamp, ATTR_API_WIND_BEARING: wind_bearing, ATTR_API_WIND_MAX_SPEED: wind_max_speed, ATTR_API_WIND_SPEED: wind_speed, } def _get_daily_forecast_from_weather_response(self, weather_response, now): if weather_response.daily: parse = False forecast = [] for day in weather_response.daily[ATTR_DATA][0][ AEMET_ATTR_FORECAST][AEMET_ATTR_DAY]: day_date = dt_util.parse_datetime(day[AEMET_ATTR_DATE]) if now.date() == day_date.date(): parse = True if parse: cur_forecast = self._convert_forecast_day(day_date, day) if cur_forecast: forecast.append(cur_forecast) return forecast return None def _get_hourly_forecast_from_weather_response(self, weather_response, now): if weather_response.hourly: parse = False hour = now.hour forecast = [] for day in weather_response.hourly[ATTR_DATA][0][ AEMET_ATTR_FORECAST][AEMET_ATTR_DAY]: day_date = dt_util.parse_datetime(day[AEMET_ATTR_DATE]) hour_start = 0 if now.date() == day_date.date(): parse = True 
hour_start = now.hour if parse: for hour in range(hour_start, 24): cur_forecast = self._convert_forecast_hour( day_date, day, hour) if cur_forecast: forecast.append(cur_forecast) return forecast return None def _convert_forecast_day(self, date, day): if not (condition := self._get_condition_day(day)): return None return { ATTR_FORECAST_CONDITION: condition, ATTR_FORECAST_PRECIPITATION_PROBABILITY: self._get_precipitation_prob_day(day), ATTR_FORECAST_TEMP: self._get_temperature_day(day), ATTR_FORECAST_TEMP_LOW: self._get_temperature_low_day(day), ATTR_FORECAST_TIME: dt_util.as_utc(date).isoformat(), ATTR_FORECAST_WIND_SPEED: self._get_wind_speed_day(day), ATTR_FORECAST_WIND_BEARING: self._get_wind_bearing_day(day), }
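The coordinator above relies on dt_util.parse_datetime and dt_util.as_utc to normalise AEMET's "elaborated" timestamps (delivered without a timezone designator, hence the appended "Z") before exposing them. A minimal standalone sketch of that round trip, assuming the homeassistant package is importable; the sample timestamp is made up:

from homeassistant.util import dt as dt_util

# AEMET-style timestamp with a "Z" suffix appended, as in _convert_weather_response
elaborated = dt_util.parse_datetime("2021-01-09T12:00:00" + "Z")
assert elaborated.tzinfo is not None             # parse_datetime returns an aware datetime
print(dt_util.as_utc(elaborated).isoformat())    # 2021-01-09T12:00:00+00:00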
def run(self): """Start processing events to save.""" from .models import States, Events from homeassistant.components import persistent_notification from sqlalchemy import exc tries = 1 connected = False while not connected and tries <= 10: if tries != 1: time.sleep(CONNECT_RETRY_WAIT) try: self._setup_connection() migration.migrate_schema(self) self._setup_run() connected = True _LOGGER.debug("Connected to recorder database") except Exception as err: # pylint: disable=broad-except _LOGGER.error( "Error during connection setup: %s (retrying " "in %s seconds)", err, CONNECT_RETRY_WAIT) tries += 1 if not connected: @callback def connection_failed(): """Connect failed tasks.""" self.async_db_ready.set_result(False) persistent_notification.async_create( self.hass, "The recorder could not start, please check the log", "Recorder") self.hass.add_job(connection_failed) return shutdown_task = object() hass_started = concurrent.futures.Future() @callback def register(): """Post connection initialize.""" self.async_db_ready.set_result(True) def shutdown(event): """Shut down the Recorder.""" if not hass_started.done(): hass_started.set_result(shutdown_task) self.queue.put(None) self.join() self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown) if self.hass.state == CoreState.running: hass_started.set_result(None) else: @callback def notify_hass_started(event): """Notify that hass has started.""" hass_started.set_result(None) self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, notify_hass_started) self.hass.add_job(register) result = hass_started.result() # If shutdown happened before Home Assistant finished starting if result is shutdown_task: return # Start periodic purge if self.keep_days and self.purge_interval: @callback def async_purge(now): """Trigger the purge and schedule the next run.""" self.queue.put(PurgeTask(self.keep_days, repack=False)) self.hass.helpers.event.async_track_point_in_time( async_purge, now + timedelta(days=self.purge_interval)) earliest = dt_util.utcnow() + timedelta(minutes=30) run = latest = dt_util.utcnow() + \ timedelta(days=self.purge_interval) with session_scope(session=self.get_session()) as session: event = session.query(Events).first() if event is not None: session.expunge(event) run = dt_util.as_utc(event.time_fired) + timedelta( days=self.keep_days + self.purge_interval) run = min(latest, max(run, earliest)) self.hass.helpers.event.track_point_in_time(async_purge, run) while True: event = self.queue.get() if event is None: self._close_run() self._close_connection() self.queue.task_done() return if isinstance(event, PurgeTask): purge.purge_old_data(self, event.keep_days, event.repack) self.queue.task_done() continue elif event.event_type == EVENT_TIME_CHANGED: self.queue.task_done() continue elif event.event_type in self.exclude_t: self.queue.task_done() continue entity_id = event.data.get(ATTR_ENTITY_ID) if entity_id is not None: if not self.entity_filter(entity_id): self.queue.task_done() continue tries = 1 updated = False while not updated and tries <= 10: if tries != 1: time.sleep(CONNECT_RETRY_WAIT) try: with session_scope(session=self.get_session()) as session: try: dbevent = Events.from_event(event) session.add(dbevent) session.flush() except (TypeError, ValueError): _LOGGER.warning( "Event is not JSON serializable: %s", event) if event.event_type == EVENT_STATE_CHANGED: try: dbstate = States.from_event(event) dbstate.event_id = dbevent.event_id session.add(dbstate) except (TypeError, ValueError): _LOGGER.warning( "State is not JSON 
serializable: %s", event.data.get('new_state')) updated = True except exc.OperationalError as err: _LOGGER.error( "Error in database connectivity: %s. " "(retrying in %s seconds)", err, CONNECT_RETRY_WAIT) tries += 1 except exc.SQLAlchemyError: updated = True _LOGGER.exception("Error saving event: %s", event) if not updated: _LOGGER.error( "Error in database update. Could not save " "after %d tries. Giving up", tries) self.queue.task_done()
def _convert_forecast_hour(self, date, day, hour):
    if not (condition := self._get_condition(day, hour)):
        return None

    forecast_dt = date.replace(hour=hour, minute=0, second=0)

    return {
        ATTR_FORECAST_CONDITION: condition,
        ATTR_FORECAST_PRECIPITATION: self._calc_precipitation(day, hour),
        ATTR_FORECAST_PRECIPITATION_PROBABILITY: self._calc_precipitation_prob(
            day, hour),
        ATTR_FORECAST_TEMP: self._get_temperature(day, hour),
        ATTR_FORECAST_TIME: dt_util.as_utc(forecast_dt).isoformat(),
        ATTR_FORECAST_WIND_SPEED: self._get_wind_speed(day, hour),
        ATTR_FORECAST_WIND_BEARING: self._get_wind_bearing(day, hour),
    }

def _calc_precipitation(self, day, hour):
    """Calculate the precipitation."""
    rain_value = self._get_rain(day, hour) or 0
    snow_value = self._get_snow(day, hour) or 0
    if round(rain_value + snow_value, 1) == 0:
        return None
    return round(rain_value + snow_value, 1)
def test_as_utc_with_utc_object(self):
    utcnow = dt_util.utcnow()
    self.assertEqual(utcnow, dt_util.as_utc(utcnow))
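A hedged companion sketch (not part of the original test suite, assuming homeassistant is importable): as_utc should also round-trip an aware local datetime back to the same UTC instant.

from homeassistant.util import dt as dt_util

utc_now = dt_util.utcnow()
local_now = dt_util.as_local(utc_now)        # same instant, expressed in the local timezone
assert dt_util.as_utc(local_now) == utc_now  # converting back yields the original UTC time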
utc_now = dt_util.utcnow() if start_time := dt_util.parse_datetime(start_time_str): start_time = dt_util.as_utc(start_time) if not start_time or start_time > utc_now: connection.send_error(msg_id, "invalid_start_time", "Invalid start_time") return end_time_str = msg.get("end_time") end_time: dt | None = None if end_time_str: if not (end_time := dt_util.parse_datetime(end_time_str)): connection.send_error(msg_id, "invalid_end_time", "Invalid end_time") return end_time = dt_util.as_utc(end_time) if end_time < start_time: connection.send_error(msg_id, "invalid_end_time", "Invalid end_time") return device_ids = msg.get("device_ids") entity_ids = msg.get("entity_ids") if entity_ids: entity_ids = async_filter_entities(hass, entity_ids) if not entity_ids: _async_send_empty_response(connection, msg_id, start_time, end_time) return event_types = async_determine_event_types(hass, entity_ids, device_ids) event_processor = EventProcessor( hass,
async def get(self, request, datetime=None): """Retrieve logbook entries.""" if datetime: datetime = dt_util.parse_datetime(datetime) if datetime is None: return self.json_message("Invalid datetime", HTTP_BAD_REQUEST) else: datetime = dt_util.start_of_local_day() period = request.query.get("period") if period is None: period = 1 else: period = int(period) entity_ids = request.query.get("entity") if entity_ids: try: entity_ids = cv.entity_ids(entity_ids) except vol.Invalid: raise InvalidEntityFormatError( f"Invalid entity id(s) encountered: {entity_ids}. " "Format should be <domain>.<object_id>" ) from vol.Invalid end_time = request.query.get("end_time") if end_time is None: start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1) end_day = start_day + timedelta(days=period) else: start_day = datetime end_day = dt_util.parse_datetime(end_time) if end_day is None: return self.json_message("Invalid end_time", HTTP_BAD_REQUEST) hass = request.app["hass"] entity_matches_only = "entity_matches_only" in request.query context_id = request.query.get("context_id") if entity_ids and context_id: return self.json_message( "Can't combine entity with context_id", HTTP_BAD_REQUEST ) def json_events(): """Fetch events and generate JSON.""" return self.json( _get_events( hass, start_day, end_day, entity_ids, self.filters, self.entities_filter, entity_matches_only, context_id, ) ) return await hass.async_add_executor_job(json_events)
def handle_state_change( hass: HomeAssistant, config_entry: ConfigEntry, changed_entity: str, old_state: State, new_state: State, ) -> None: """Listener to track state changes to lock entities.""" primary_lock: KeymasterLock = hass.data[DOMAIN][ config_entry.entry_id][PRIMARY_LOCK] child_locks: List[KeymasterLock] = hass.data[DOMAIN][ config_entry.entry_id][CHILD_LOCKS] for lock in [primary_lock, *child_locks]: # Don't do anything if the changed entity is not this lock if changed_entity != lock.lock_entity_id: continue # Determine action type to set appropriate action text using ACTION_MAP action_type = "" if lock.alarm_type_or_access_control_entity_id and ( ALARM_TYPE in lock.alarm_type_or_access_control_entity_id or ALARM_TYPE.replace( "_", "") in lock.alarm_type_or_access_control_entity_id): action_type = ALARM_TYPE if (lock.alarm_type_or_access_control_entity_id and ACCESS_CONTROL in lock.alarm_type_or_access_control_entity_id): action_type = ACCESS_CONTROL # Get alarm_level/usercode and alarm_type/access_control states alarm_level_state = hass.states.get( lock.alarm_level_or_user_code_entity_id) alarm_level_value = (int(alarm_level_state.state) if alarm_level_state and alarm_level_state.state not in ( STATE_UNKNOWN, STATE_UNAVAILABLE) else None) alarm_type_state = hass.states.get( lock.alarm_type_or_access_control_entity_id) alarm_type_value = (int(alarm_type_state.state) if alarm_type_state and alarm_type_state.state not in ( STATE_UNKNOWN, STATE_UNAVAILABLE) else None) # Bail out if we can't use the sensors to provide a meaningful message if alarm_level_value is None or alarm_type_value is None: return # If lock has changed state but alarm_type/access_control state hasn't changed # in a while set action_value to RF lock/unlock if (alarm_level_state is not None and int(alarm_level_state.state) == 0 and dt_util.utcnow() - dt_util.as_utc( alarm_type_state.last_changed) > timedelta(seconds=5) and action_type in LOCK_STATE_MAP): alarm_type_value = LOCK_STATE_MAP[action_type][new_state.state] # Lookup action text based on alarm type value action_text = (ACTION_MAP.get(action_type, {}).get( alarm_type_value, "Unknown Alarm Type Value") if alarm_type_value is not None else None) # Lookup name for usercode code_slot_name_state = hass.states.get( f"input_text.{lock.lock_name}_name_{alarm_level_value}") # Fire state change event hass.bus.fire( EVENT_KEYMASTER_LOCK_STATE_CHANGED, event_data={ ATTR_NOTIFICATION_SOURCE: "entity_state", ATTR_NAME: lock.lock_name, ATTR_ENTITY_ID: lock.lock_entity_id, ATTR_STATE: new_state.state, ATTR_ACTION_CODE: alarm_type_value, ATTR_ACTION_TEXT: action_text, ATTR_CODE_SLOT: alarm_level_value or 0, ATTR_CODE_SLOT_NAME: code_slot_name_state.state if code_slot_name_state is not None else "", }, ) return
def native_value(self):
    """Return the state of the sensor."""
    if self._state is None:
        return None
    return dt_util.as_utc(self._state).isoformat()
async def _async_update_state(self, ): # pylint: disable=too-many-locals,too-many-branches,too-many-statements """Update the sensor state.""" _LOGGER.debug('Updating sensor "%s"', self.name) start = end = start_ts = end_ts = None p_period = self._period # Parse templates await self._async_update_period() if self._period is not None: now = datetime.datetime.now() start, end = self._period if p_period is None: p_start = p_end = now else: p_start, p_end = p_period # Convert times to UTC start = dt_util.as_utc(start) end = dt_util.as_utc(end) p_start = dt_util.as_utc(p_start) p_end = dt_util.as_utc(p_end) # Compute integer timestamps now_ts = math.floor(dt_util.as_timestamp(now)) start_ts = math.floor(dt_util.as_timestamp(start)) end_ts = math.floor(dt_util.as_timestamp(end)) p_start_ts = math.floor(dt_util.as_timestamp(p_start)) p_end_ts = math.floor(dt_util.as_timestamp(p_end)) # If period has not changed and current time after the period end.. if start_ts == p_start_ts and end_ts == p_end_ts and end_ts <= now_ts: # Don't compute anything as the value cannot have changed return self.available_sources = 0 values = [] self.count = 0 self.min_value = self.max_value = None # pylint: disable=too-many-nested-blocks for entity_id in self.sources: _LOGGER.debug('Processing entity "%s"', entity_id) state = self.hass.states.get(entity_id) # type: LazyState if state is None: _LOGGER.error('Unable to find an entity "%s"', entity_id) continue self._init_mode(state) value = 0 elapsed = 0 if self._period is None: # Get current state value = self._get_state_value(state) _LOGGER.debug("Current state: %s", value) else: # Get history between start and now history_list = await get_instance( self.hass).async_add_executor_job( history.state_changes_during_period, self.hass, start, end, str(entity_id), ) if (entity_id not in history_list.keys() or history_list[entity_id] is None or len(history_list[entity_id]) == 0): value = self._get_state_value(state) _LOGGER.warning( 'Historical data not found for entity "%s". ' "Current state used: %s", entity_id, value, ) else: # Get the first state item = history_list[entity_id][0] _LOGGER.debug("Initial historical state: %s", item) last_state = None last_time = start_ts if item is not None and self._has_state(item.state): last_state = self._get_state_value(item) # Get the other states for item in history_list.get(entity_id): _LOGGER.debug("Historical state: %s", item) current_state = self._get_state_value(item) current_time = item.last_changed.timestamp() if last_state is not None: last_elapsed = current_time - last_time value += last_state * last_elapsed elapsed += last_elapsed last_state = current_state last_time = current_time # Count time elapsed between last history state and now if last_state is not None: last_elapsed = end_ts - last_time value += last_state * last_elapsed elapsed += last_elapsed if elapsed: value /= elapsed _LOGGER.debug("Historical average state: %s", value) if isinstance(value, numbers.Number): values.append(value) self.available_sources += 1 if values: self._attr_native_value = round( sum(values) / len(values), self._precision) if self._precision < 1: self._attr_native_value = int(self._attr_native_value) else: self._attr_native_value = None _LOGGER.debug("Total average state: %s", self._attr_native_value)
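The history loop above computes a time-weighted average: each observed state value is weighted by how long it was in effect between the period start and end. A minimal standalone sketch of that calculation; the helper name and the sample data are illustrative only, not part of the integration.

def time_weighted_average(samples, start_ts, end_ts):
    """Average a list of (timestamp, value) pairs, sorted by timestamp,
    weighting each value by how long it remained in effect."""
    total = 0.0
    elapsed = 0.0
    last_value = None
    last_ts = start_ts
    for ts, value in samples:
        if last_value is not None:
            total += last_value * (ts - last_ts)
            elapsed += ts - last_ts
        last_value, last_ts = value, ts
    # Count the time between the last sample and the end of the period
    if last_value is not None:
        total += last_value * (end_ts - last_ts)
        elapsed += end_ts - last_ts
    return total / elapsed if elapsed else None

print(time_weighted_average([(0, 10.0), (60, 20.0)], 0, 120))  # 15.0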
def update(self) -> None: """Get the latest data from GTFS and update the states.""" with self.lock: # Fetch valid stop information once if not self._origin: stops = self._pygtfs.stops_by_id(self.origin) if not stops: self._available = False _LOGGER.warning("Origin stop ID %s not found", self.origin) return self._origin = stops[0] if not self._destination: stops = self._pygtfs.stops_by_id(self.destination) if not stops: self._available = False _LOGGER.warning("Destination stop ID %s not found", self.destination) return self._destination = stops[0] self._available = True # Fetch next departure self._departure = get_next_departure( self._pygtfs, self.origin, self.destination, self._offset, self._include_tomorrow, ) # Define the state as a UTC timestamp with ISO 8601 format if not self._departure: self._state = None else: self._state = dt_util.as_utc( self._departure["departure_time"]).isoformat() # Fetch trip and route details once, unless updated if not self._departure: self._trip = None else: trip_id = self._departure["trip_id"] if not self._trip or self._trip.trip_id != trip_id: _LOGGER.debug("Fetching trip details for %s", trip_id) self._trip = self._pygtfs.trips_by_id(trip_id)[0] route_id = self._departure["route_id"] if not self._route or self._route.route_id != route_id: _LOGGER.debug("Fetching route details for %s", route_id) self._route = self._pygtfs.routes_by_id(route_id)[0] # Fetch agency details exactly once if self._agency is None and self._route: _LOGGER.debug("Fetching agency details for %s", self._route.agency_id) try: self._agency = self._pygtfs.agencies_by_id( self._route.agency_id)[0] except IndexError: _LOGGER.warning( "Agency ID '%s' was not found in agency table, " "you may want to update the routes database table " "to fix this missing reference", self._route.agency_id, ) self._agency = False # Assign attributes, icon and name self.update_attributes() if self._route: self._icon = ICONS.get(self._route.route_type, ICON) else: self._icon = ICON name = "{agency} {origin} to {destination} next departure" if not self._departure: name = "{default}" self._name = self._custom_name or name.format( agency=getattr(self._agency, "agency_name", DEFAULT_NAME), default=DEFAULT_NAME, origin=self.origin, destination=self.destination, )
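A standalone sketch of the naming trick used above: str.format() silently ignores keyword arguments the template does not reference, so swapping the template to "{default}" when there is no upcoming departure still works with the full set of keyword arguments. Values below are placeholders.

name = "{agency} {origin} to {destination} next departure"
no_departure = "{default}"
kwargs = {"agency": "Example Transit", "default": "GTFS Sensor",
          "origin": "A", "destination": "B"}
print(name.format(**kwargs))          # Example Transit A to B next departure
print(no_departure.format(**kwargs))  # GTFS Sensor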
async def test_shabbat_times_sensor( hass, legacy_patchable_time, language, now, candle_lighting, havdalah, diaspora, tzname, latitude, longitude, result, ): """Test sensor output for upcoming shabbat/yomtov times.""" time_zone = dt_util.get_time_zone(tzname) test_time = now.replace(tzinfo=time_zone) hass.config.time_zone = tzname hass.config.latitude = latitude hass.config.longitude = longitude registry = er.async_get(hass) with alter_time(test_time): assert await async_setup_component( hass, jewish_calendar.DOMAIN, { "jewish_calendar": { "name": "test", "language": language, "diaspora": diaspora, "candle_lighting_minutes_before_sunset": candle_lighting, "havdalah_minutes_after_sunset": havdalah, } }, ) await hass.async_block_till_done() future = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() for sensor_type, result_value in result.items(): if not sensor_type.startswith(language): print(f"Not checking {sensor_type} for {language}") continue sensor_type = sensor_type.replace(f"{language}_", "") result_value = ( dt_util.as_utc(result_value) if isinstance(result_value, dt) else result_value ) assert hass.states.get(f"sensor.test_{sensor_type}").state == str( result_value ), f"Value for {sensor_type}" entity = registry.async_get(f"sensor.test_{sensor_type}") target_sensor_type = sensor_type.replace("parshat_hashavua", "weekly_portion") target_uid = "_".join( map( str, [ latitude, longitude, tzname, HDATE_DEFAULT_ALTITUDE, diaspora, language, candle_lighting, havdalah, target_sensor_type, ], ) ) assert entity.unique_id == target_uid
def setup_scanner(hass, config, see, discovery_info=None): """Set up the Bluetooth LE Scanner.""" new_devices = {} hass.data.setdefault(DATA_BLE, {DATA_BLE_ADAPTER: None}) def handle_stop(event): """Try to shut down the bluetooth child process nicely.""" # These should never be unset at the point this runs, but just for # safety's sake, use `get`. adapter = hass.data.get(DATA_BLE, {}).get(DATA_BLE_ADAPTER) if adapter is not None: adapter.kill() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, handle_stop) if config[CONF_TRACK_BATTERY]: battery_track_interval = config[CONF_TRACK_BATTERY_INTERVAL] else: battery_track_interval = timedelta(0) def see_device(address, name, new_device=False, battery=None): """Mark a device as seen.""" if name is not None: name = name.strip("\x00") if new_device: if address in new_devices: new_devices[address]["seen"] += 1 if name: new_devices[address]["name"] = name else: name = new_devices[address]["name"] _LOGGER.debug("Seen %s %s times", address, new_devices[address]["seen"]) if new_devices[address]["seen"] < MIN_SEEN_NEW: return _LOGGER.debug("Adding %s to tracked devices", address) devs_to_track.append(address) if battery_track_interval > timedelta(0): devs_track_battery[address] = dt_util.as_utc( datetime.fromtimestamp(0)) else: _LOGGER.debug("Seen %s for the first time", address) new_devices[address] = {"seen": 1, "name": name} return see( mac=BLE_PREFIX + address, host_name=name, source_type=SOURCE_TYPE_BLUETOOTH_LE, battery=battery, ) def discover_ble_devices(): """Discover Bluetooth LE devices.""" _LOGGER.debug("Discovering Bluetooth LE devices") try: adapter = pygatt.GATTToolBackend() hass.data[DATA_BLE][DATA_BLE_ADAPTER] = adapter devs = adapter.scan() devices = {x["address"]: x["name"] for x in devs} _LOGGER.debug("Bluetooth LE devices discovered = %s", devices) except (RuntimeError, pygatt.exceptions.BLEError) as error: _LOGGER.error("Error during Bluetooth LE scan: %s", error) return {} return devices yaml_path = hass.config.path(YAML_DEVICES) devs_to_track = [] devs_donot_track = [] devs_track_battery = {} # Load all known devices. # We just need the devices so set consider_home and home range # to 0 for device in asyncio.run_coroutine_threadsafe( async_load_config(yaml_path, hass, 0), hass.loop).result(): # check if device is a valid bluetooth device if device.mac and device.mac[:4].upper() == BLE_PREFIX: address = device.mac[4:] if device.track: _LOGGER.debug("Adding %s to BLE tracker", device.mac) devs_to_track.append(address) if battery_track_interval > timedelta(0): devs_track_battery[address] = dt_util.as_utc( datetime.fromtimestamp(0)) else: _LOGGER.debug("Adding %s to BLE do not track", device.mac) devs_donot_track.append(address) # if track new devices is true discover new devices # on every scan. 
track_new = config.get(CONF_TRACK_NEW)
if not devs_to_track and not track_new:
    _LOGGER.warning("No Bluetooth LE devices to track!")
    return False

interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)

def update_ble(now):
    """Lookup Bluetooth LE devices and update status."""
    devs = discover_ble_devices()

    if devs_track_battery:
        adapter = hass.data[DATA_BLE][DATA_BLE_ADAPTER]

    for mac in devs_to_track:
        if mac not in devs:
            continue
        if devs[mac] is None:
            devs[mac] = mac
        battery = None
        if (mac in devs_track_battery
                and now > devs_track_battery[mac] + battery_track_interval):
            handle = None
            try:
                adapter.start(reset_on_start=False)
                _LOGGER.debug("Reading battery for Bluetooth LE device %s", mac)
                bt_device = adapter.connect(mac)
                # Try to get the handle; it will raise a BLEError exception if not available
                handle = bt_device.get_handle(BATTERY_CHARACTERISTIC_UUID)
                battery = ord(
                    bt_device.char_read(BATTERY_CHARACTERISTIC_UUID))
                devs_track_battery[mac] = now
            except pygatt.exceptions.NotificationTimeout:
                _LOGGER.warning("Timeout when trying to get battery status")
            except pygatt.exceptions.BLEError as err:
                _LOGGER.warning("Could not read battery status: %s", err)
                if handle is None:
                    # The handle is only None when get_handle() raised, meaning the
                    # device does not offer battery information, so there is no point
                    # in asking again later on. Remove the device from the
                    # battery-tracked devices, so that their battery is not wasted
                    # trying to get an unavailable information.
                    del devs_track_battery[mac]
            finally:
                adapter.stop()
        see_device(mac, devs[mac], battery=battery)

    if track_new:
        for address in devs:
            if address not in devs_to_track and address not in devs_donot_track:
                _LOGGER.info("Discovered Bluetooth LE device %s", address)
                see_device(address, devs[address], new_device=True)

    track_point_in_utc_time(hass, update_ble, dt_util.utcnow() + interval)

update_ble(dt_util.utcnow())
return True
async def get(self, request, datetime=None): """Return history over a period of time.""" timer_start = time.perf_counter() if datetime: datetime = dt_util.parse_datetime(datetime) if datetime is None: return self.json_message("Invalid datetime", HTTP_BAD_REQUEST) now = dt_util.utcnow() one_day = timedelta(days=1) if datetime: start_time = dt_util.as_utc(datetime) else: start_time = now - one_day if start_time > now: return self.json([]) end_time = request.query.get("end_time") if end_time: end_time = dt_util.parse_datetime(end_time) if end_time: end_time = dt_util.as_utc(end_time) else: return self.json_message("Invalid end_time", HTTP_BAD_REQUEST) else: end_time = start_time + one_day entity_ids = request.query.get("filter_entity_id") if entity_ids: entity_ids = entity_ids.lower().split(",") include_start_time_state = "skip_initial_state" not in request.query hass = request.app["hass"] result = await hass.async_add_job( get_significant_states, hass, start_time, end_time, entity_ids, self.filters, include_start_time_state, ) result = list(result.values()) if _LOGGER.isEnabledFor(logging.DEBUG): elapsed = time.perf_counter() - timer_start _LOGGER.debug("Extracted %d states in %fs", sum(map(len, result)), elapsed) # Optionally reorder the result to respect the ordering given # by any entities explicitly included in the configuration. if self.use_include_order: sorted_result = [] for order_entity in self.filters.included_entities: for state_list in result: if state_list[0].entity_id == order_entity: sorted_result.append(state_list) result.remove(state_list) break sorted_result.extend(result) result = sorted_result return await hass.async_add_job(self.json, result)
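For reference, a hedged client-side sketch of how the history endpoint served by this view might be called over the REST API; the base URL, access token and entity id below are placeholders, not values from the source.

import requests

resp = requests.get(
    "http://localhost:8123/api/history/period/2021-01-09T00:00:00+00:00",
    headers={"Authorization": "Bearer LONG_LIVED_ACCESS_TOKEN"},
    params={
        "filter_entity_id": "sensor.outside_temperature",
        "end_time": "2021-01-10T00:00:00+00:00",
    },
    timeout=10,
)
for state_list in resp.json():  # one list of state objects per entity
    print(state_list[0]["entity_id"], len(state_list))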