def validate_device_has_at_least_one_identifier(value: ConfigType) -> \
        ConfigType:
    """Validate that a device info entry has at least one identifying value."""
    # Either field alone is sufficient; reject only when both are empty.
    if value.get(CONF_IDENTIFIERS) or value.get(CONF_CONNECTIONS):
        return value
    raise vol.Invalid("Device must have at least one identifying value in "
                      "'identifiers' and/or 'connections'")
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType,
                               async_add_devices, discovery_info=None):
    """Set up the WUnderground sensor."""
    latitude = config.get(CONF_LATITUDE, hass.config.latitude)
    longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
    pws_id = config.get(CONF_PWS_ID)

    rest = WUndergroundData(
        hass, config.get(CONF_API_KEY), pws_id,
        config.get(CONF_LANG), latitude, longitude)

    # A manually specified weather station is the most stable identifier;
    # without one, fall back to the coordinates.
    unique_id_base = (
        pws_id if pws_id is not None
        else "@{:06f},{:06f}".format(longitude, latitude))

    sensors = [
        WUndergroundSensor(hass, rest, variable, unique_id_base)
        for variable in config[CONF_MONITORED_CONDITIONS]
    ]

    # First fetch; abort setup when the API returned nothing.
    await rest.async_update()
    if not rest.data:
        raise PlatformNotReady

    async_add_devices(sensors, True)
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType,
                               async_add_devices, discovery_info=None):
    """Set up the cast platform.

    Depending on configuration either registers a discovery listener for
    chromecasts or connects directly to one known host.
    """
    import pychromecast

    # Import CEC IGNORE attributes
    pychromecast.IGNORE_CEC += config.get(CONF_IGNORE_CEC, [])
    hass.data.setdefault(ADDED_CAST_DEVICES_KEY, {})
    hass.data.setdefault(KNOWN_CHROMECASTS_KEY, {})

    # None -> use discovery; (host, port) -> manually specify chromecast.
    want_host = None
    if discovery_info:
        want_host = (discovery_info.get('host'), discovery_info.get('port'))
    elif CONF_HOST in config:
        want_host = (config.get(CONF_HOST), DEFAULT_PORT)

    enable_discovery = False
    if want_host is None:
        # We were explicitly told to enable pychromecast discovery.
        enable_discovery = True
    elif want_host[1] != DEFAULT_PORT:
        # We're trying to add a group, so we have to use pychromecast's
        # discovery to get the correct friendly name.
        enable_discovery = True

    if enable_discovery:
        @callback
        def async_cast_discovered(chromecast):
            """Callback for when a new chromecast is discovered."""
            if want_host is not None and \
                    (chromecast.host, chromecast.port) != want_host:
                return  # for groups, only add requested device
            cast_device = _async_create_cast_device(hass, chromecast)
            if cast_device is not None:
                async_add_devices([cast_device])

        async_dispatcher_connect(hass, SIGNAL_CAST_DISCOVERED,
                                 async_cast_discovered)
        # Re-play the callback for all past chromecasts, store the objects in
        # a list to avoid concurrent modification resulting in exception.
        for chromecast in list(hass.data[KNOWN_CHROMECASTS_KEY].values()):
            async_cast_discovered(chromecast)

        hass.async_add_job(_setup_internal_discovery, hass)
    else:
        # Manually add a "normal" Chromecast, we can do that without discovery.
        try:
            # Chromecast() does blocking network I/O, so run in executor.
            chromecast = await hass.async_add_job(
                pychromecast.Chromecast, *want_host)
        except pychromecast.ChromecastConnectionError as err:
            _LOGGER.warning("Can't set up chromecast on %s: %s",
                            want_host[0], err)
            raise PlatformNotReady
        key = (chromecast.host, chromecast.port, chromecast.uuid)
        cast_device = _async_create_cast_device(hass, chromecast)
        if cast_device is not None:
            hass.data[KNOWN_CHROMECASTS_KEY][key] = chromecast
            async_add_devices([cast_device])
def setup_platform(hass, config: ConfigType,
                   add_devices: Callable[[list], None],
                   discovery_info=None):
    """Set up the Sesame platform."""
    import pysesame

    email = config.get(CONF_EMAIL)
    password = config.get(CONF_PASSWORD)

    # Wrap every Sesame lock returned for this account in an entity.
    devices = []
    for sesame in pysesame.get_sesames(email, password):
        devices.append(SesameDevice(sesame))
    add_devices(devices)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Start the MQTT protocol service."""
    conf = config.get(DOMAIN)  # type: Optional[ConfigType]

    # We need this because discovery can cause components to be set up and
    # otherwise it will not load the users config.
    # This needs a better solution.
    hass.data[DATA_MQTT_HASS_CONFIG] = config

    if conf is None:
        # If we have a config entry, setup is done by that config entry.
        # If there is no config entry, this should fail.
        return bool(hass.config_entries.async_entries(DOMAIN))

    conf = dict(conf)

    if CONF_EMBEDDED in conf or CONF_BROKER not in conf:
        # Embedded broker requested explicitly, or no external broker
        # configured -> start the embedded broker.
        if (conf.get(CONF_PASSWORD) is None and
                config.get('http', {}).get('api_password') is not None):
            _LOGGER.error(
                "Starting from release 0.76, the embedded MQTT broker does not"
                " use api_password as default password anymore. Please set"
                " password configuration. See https://home-assistant.io/docs/"
                "mqtt/broker#embedded-broker for details")
            return False

        broker_config = await _async_setup_server(hass, config)

        if broker_config is None:
            _LOGGER.error("Unable to start embedded MQTT broker")
            return False

        # Map the broker's positional result tuple onto the regular
        # broker configuration keys.
        conf.update({
            CONF_BROKER: broker_config[0],
            CONF_PORT: broker_config[1],
            CONF_USERNAME: broker_config[2],
            CONF_PASSWORD: broker_config[3],
            CONF_CERTIFICATE: broker_config[4],
            CONF_PROTOCOL: broker_config[5],
            CONF_CLIENT_KEY: None,
            CONF_CLIENT_CERT: None,
            CONF_TLS_INSECURE: None,
        })

    hass.data[DATA_MQTT_CONFIG] = conf

    # Only import if we haven't before.
    if not hass.config_entries.async_entries(DOMAIN):
        hass.async_create_task(hass.config_entries.flow.async_init(
            DOMAIN, context={'source': config_entries.SOURCE_IMPORT},
            data={}
        ))

    return True
def from_config(config: ConfigType, config_validation: bool=True):
    """Turn a condition configuration into a method."""
    condition = config.get(CONF_CONDITION)
    # Condition factories live in this module under a conventional name.
    factory = getattr(
        sys.modules[__name__], FROM_CONFIG_FORMAT.format(condition), None)

    if factory is None:
        raise HomeAssistantError('Invalid condition "{}" specified {}'.format(
            condition, config))

    return factory(config, config_validation)
def zone_from_config(config: ConfigType,
                     config_validation: bool = True) -> Callable[..., bool]:
    """Wrap action method with zone based condition."""
    if config_validation:
        config = cv.ZONE_CONDITION_SCHEMA(config)

    entity_id = config.get(CONF_ENTITY_ID)
    zone_entity_id = config.get(CONF_ZONE)

    def check_in_zone(hass: HomeAssistant,
                      variables: TemplateVarsType = None) -> bool:
        """Test whether the tracked entity is inside the zone."""
        return zone(hass, zone_entity_id, entity_id)

    return check_in_zone
def time_from_config(config: ConfigType,
                     config_validation: bool = True) -> Callable[..., bool]:
    """Wrap action method with time based condition."""
    if config_validation:
        config = cv.TIME_CONDITION_SCHEMA(config)

    # Capture the schedule once; the returned closure reuses it per check.
    before = config.get(CONF_BEFORE)
    after = config.get(CONF_AFTER)
    weekday = config.get(CONF_WEEKDAY)

    def check_time(hass: HomeAssistant,
                   variables: TemplateVarsType = None) -> bool:
        """Validate time based if-condition."""
        return time(before, after, weekday)

    return check_time
def state_from_config(config: ConfigType,
                      config_validation: bool = True) -> Callable[..., bool]:
    """Wrap action method with state based condition."""
    if config_validation:
        config = cv.STATE_CONDITION_SCHEMA(config)

    entity_id = config.get(CONF_ENTITY_ID)
    req_state = cast(str, config.get(CONF_STATE))
    for_period = config.get('for')

    def check_state(hass: HomeAssistant,
                    variables: TemplateVarsType = None) -> bool:
        """Test whether the entity has been in the required state."""
        return state(hass, entity_id, req_state, for_period)

    return check_state
def async_from_config(config: ConfigType, config_validation: bool = True):
    """Turn a condition configuration into a method.

    Should be run on the event loop.
    """
    condition = config.get(CONF_CONDITION)

    # Prefer the async factory; fall back to the sync one.
    factory = None
    for fmt in (ASYNC_FROM_CONFIG_FORMAT, FROM_CONFIG_FORMAT):
        candidate = getattr(sys.modules[__name__], fmt.format(condition), None)
        if candidate:
            factory = candidate
            break

    if factory is None:
        raise HomeAssistantError('Invalid condition "{}" specified {}'.format(
            condition, config))

    return factory(config, config_validation)
def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the recorder.

    NOTE(review): uses ``yield from`` inside a plain ``def`` -- this is a
    generator-based coroutine; its decorator is presumably applied outside
    this view.
    """
    conf = config.get(DOMAIN, {})
    keep_days = conf.get(CONF_PURGE_KEEP_DAYS)
    purge_interval = conf.get(CONF_PURGE_INTERVAL)

    db_url = conf.get(CONF_DB_URL, None)
    if not db_url:
        # Default to the SQLite file in the config directory.
        db_url = DEFAULT_URL.format(
            hass_config_path=hass.config.path(DEFAULT_DB_FILE))

    include = conf.get(CONF_INCLUDE, {})
    exclude = conf.get(CONF_EXCLUDE, {})
    instance = hass.data[DATA_INSTANCE] = Recorder(
        hass=hass, keep_days=keep_days, purge_interval=purge_interval,
        uri=db_url, include=include, exclude=exclude)
    instance.async_initialize()
    instance.start()

    @asyncio.coroutine
    def async_handle_purge_service(service):
        """Handle calls to the purge service."""
        instance.do_adhoc_purge(service.data[ATTR_KEEP_DAYS])

    hass.services.async_register(
        DOMAIN, SERVICE_PURGE, async_handle_purge_service,
        schema=SERVICE_PURGE_SCHEMA)

    # Wait for the recorder thread to signal the database is ready.
    return (yield from instance.async_db_ready)
def sun_from_config(config: ConfigType,
                    config_validation: bool = True) -> Callable[..., bool]:
    """Wrap action method with sun based condition."""
    if config_validation:
        config = cv.SUN_CONDITION_SCHEMA(config)

    before = config.get('before')
    after = config.get('after')
    before_offset = config.get('before_offset')
    after_offset = config.get('after_offset')

    def check_sun(hass: HomeAssistant,
                  variables: TemplateVarsType = None) -> bool:
        """Validate time based if-condition."""
        return sun(hass, before, after, before_offset, after_offset)

    return check_sun
def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the recorder.

    NOTE(review): ``yield from`` in a plain ``def`` -- generator-based
    coroutine; decorator presumably applied outside this view.
    """
    conf = config.get(DOMAIN, {})
    keep_days = conf.get(CONF_PURGE_KEEP_DAYS)
    purge_interval = conf.get(CONF_PURGE_INTERVAL)

    if keep_days is None and purge_interval != 0:
        # Announce the upcoming change of the default purge behavior.
        _LOGGER.warning(
            "From version 0.64.0 the 'recorder' component will by default "
            "purge data older than 10 days. To keep data longer you must "
            "configure 'purge_keep_days' or 'purge_interval'.")

    db_url = conf.get(CONF_DB_URL, None)
    if not db_url:
        # Default to the SQLite file in the config directory.
        db_url = DEFAULT_URL.format(
            hass_config_path=hass.config.path(DEFAULT_DB_FILE))

    include = conf.get(CONF_INCLUDE, {})
    exclude = conf.get(CONF_EXCLUDE, {})
    instance = hass.data[DATA_INSTANCE] = Recorder(
        hass=hass, keep_days=keep_days, purge_interval=purge_interval,
        uri=db_url, include=include, exclude=exclude)
    instance.async_initialize()
    instance.start()

    @asyncio.coroutine
    def async_handle_purge_service(service):
        """Handle calls to the purge service."""
        instance.do_adhoc_purge(service.data[ATTR_KEEP_DAYS])

    hass.services.async_register(
        DOMAIN, SERVICE_PURGE, async_handle_purge_service,
        schema=SERVICE_PURGE_SCHEMA)

    # Wait for the recorder thread to signal the database is ready.
    return (yield from instance.async_db_ready)
def async_setup_scanner_platform(hass: HomeAssistantType, config: ConfigType,
                                 scanner: Any, async_see_device: Callable):
    """Helper method to connect scanner-based platform to device tracker.

    This method is a coroutine.
    """
    interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)

    # Initial scan of each mac we also tell about host name for config
    seen = set()  # type: Any

    def device_tracker_scan(now: dt_util.dt.datetime):
        """Called when interval matches."""
        found_devices = scanner.scan_devices()

        for mac in found_devices:
            if mac in seen:
                # Host name only needs to be reported once per mac.
                host_name = None
            else:
                host_name = scanner.get_device_name(mac)
                seen.add(mac)
            hass.add_job(async_see_device(mac=mac, host_name=host_name))

    async_track_utc_time_change(
        hass, device_tracker_scan, second=range(0, 60, interval))

    # Run one scan immediately instead of waiting for the first interval.
    hass.async_add_job(device_tracker_scan, None)
def async_setup_scanner_platform(hass: HomeAssistantType, config: ConfigType,
                                 scanner: Any, async_see_device: Callable,
                                 platform: str):
    """Set up the connect scanner-based platform to device tracker.

    This method must be run in the event loop.
    """
    interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
    # Serializes updates so a slow scanner can't run overlapping scans.
    update_lock = asyncio.Lock(loop=hass.loop)
    scanner.hass = hass

    # Initial scan of each mac we also tell about host name for config
    seen = set()  # type: Any

    @asyncio.coroutine
    def async_device_tracker_scan(now: dt_util.dt.datetime):
        """Handle interval matches."""
        if update_lock.locked():
            # A previous scan is still running; skip this cycle.
            _LOGGER.warning(
                "Updating device list from %s took longer than the scheduled "
                "scan interval %s", platform, interval)
            return

        with (yield from update_lock):
            found_devices = yield from scanner.async_scan_devices()

            for mac in found_devices:
                if mac in seen:
                    host_name = None
                else:
                    host_name = yield from scanner.async_get_device_name(mac)
                    seen.add(mac)
                try:
                    extra_attributes = (yield from
                                        scanner.async_get_extra_attributes(mac))
                except NotImplementedError:
                    # Optional scanner capability; fall back to no extras.
                    extra_attributes = dict()

                kwargs = {
                    'mac': mac,
                    'host_name': host_name,
                    'source_type': SOURCE_TYPE_ROUTER,
                    'attributes': {
                        'scanner': scanner.__class__.__name__,
                        **extra_attributes
                    }
                }

                # Router trackers have no GPS; borrow the home zone's
                # coordinates so the device appears at home.
                zone_home = hass.states.get(zone.ENTITY_ID_HOME)
                if zone_home:
                    kwargs['gps'] = [zone_home.attributes[ATTR_LATITUDE],
                                     zone_home.attributes[ATTR_LONGITUDE]]
                    kwargs['gps_accuracy'] = 0

                hass.async_add_job(async_see_device(**kwargs))

    async_track_time_interval(hass, async_device_tracker_scan, interval)

    # Kick off an initial scan right away.
    hass.async_add_job(async_device_tracker_scan(None))
async def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Set up UPnP component."""
    conf_default = CONFIG_SCHEMA({DOMAIN: {}})[DOMAIN]
    conf = config.get(DOMAIN, conf_default)
    local_ip = await hass.async_add_executor_job(get_local_ip)
    hass.data[DOMAIN] = {
        'config': conf,
        'devices': {},
        # NOTE(review): reads CONF_LOCAL_IP from the top-level ``config``
        # mapping, not from ``conf`` -- verify this is intentional.
        'local_ip': config.get(CONF_LOCAL_IP, local_ip),
        'ports': conf.get('ports', {}),
    }

    if conf is not None:
        # NOTE(review): ``conf`` can seemingly never be None here (the
        # default is a schema-built dict) -- guard looks always-true.
        hass.async_create_task(hass.config_entries.flow.async_init(
            DOMAIN, context={'source': config_entries.SOURCE_IMPORT}))

    return True
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the ISY 994 platform.

    Connects to the ISY controller, categorizes its nodes/programs and
    loads the supported platforms. Stores the connection in module-level
    globals.
    """
    isy_config = config.get(DOMAIN)

    user = isy_config.get(CONF_USERNAME)
    password = isy_config.get(CONF_PASSWORD)
    tls_version = isy_config.get(CONF_TLS_VER)
    host = urlparse(isy_config.get(CONF_HOST))
    port = host.port
    addr = host.geturl()
    hidden_identifier = isy_config.get(
        CONF_HIDDEN_STRING, DEFAULT_HIDDEN_STRING)
    sensor_identifier = isy_config.get(
        CONF_SENSOR_STRING, DEFAULT_SENSOR_STRING)

    global HIDDEN_STRING
    HIDDEN_STRING = hidden_identifier

    # Reduce the configured URL to a bare address; scheme decides TLS use.
    if host.scheme == 'http':
        addr = addr.replace('http://', '')
        https = False
    elif host.scheme == 'https':
        addr = addr.replace('https://', '')
        https = True
    else:
        _LOGGER.error("isy994 host value in configuration is invalid")
        return False

    # Strip the port suffix; the port is passed separately below.
    addr = addr.replace(':{}'.format(port), '')

    import PyISY

    global PYISY
    PYISY = PyISY

    # Connect to ISY controller.
    global ISY
    ISY = PyISY.ISY(addr, port, username=user, password=password,
                    use_https=https, tls_ver=tls_version, log=_LOGGER)
    if not ISY.connected:
        return False

    _categorize_nodes(hidden_identifier, sensor_identifier)
    _categorize_programs()

    if ISY.configuration.get('Weather Information'):
        _categorize_weather()

    # Listen for HA stop to disconnect.
    # NOTE(review): ``stop`` is not defined in this block; presumably a
    # module-level handler elsewhere in the file.
    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop)

    # Load platforms for the devices in the ISY controller that we support.
    for component in SUPPORTED_DOMAINS:
        discovery.load_platform(hass, component, DOMAIN, {}, config)

    ISY.auto_update = True
    return True
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the ISY 994 platform.

    Connects to the ISY controller, categorizes its nodes and programs
    into ``hass.data`` buckets, and loads the supported platforms.
    """
    # Pre-create the per-domain buckets the platforms will read from.
    hass.data[ISY994_NODES] = {}
    for domain in SUPPORTED_DOMAINS:
        hass.data[ISY994_NODES][domain] = []

    hass.data[ISY994_WEATHER] = []

    hass.data[ISY994_PROGRAMS] = {}
    for domain in SUPPORTED_DOMAINS:
        hass.data[ISY994_PROGRAMS][domain] = []

    isy_config = config.get(DOMAIN)

    user = isy_config.get(CONF_USERNAME)
    password = isy_config.get(CONF_PASSWORD)
    tls_version = isy_config.get(CONF_TLS_VER)
    host = urlparse(isy_config.get(CONF_HOST))
    ignore_identifier = isy_config.get(CONF_IGNORE_STRING)
    sensor_identifier = isy_config.get(CONF_SENSOR_STRING)
    enable_climate = isy_config.get(CONF_ENABLE_CLIMATE)

    # Scheme decides TLS use; fall back to the scheme's default port.
    if host.scheme == 'http':
        https = False
        port = host.port or 80
    elif host.scheme == 'https':
        https = True
        port = host.port or 443
    else:
        _LOGGER.error("isy994 host value in configuration is invalid")
        return False

    import PyISY

    # Connect to ISY controller.
    isy = PyISY.ISY(host.hostname, port, username=user, password=password,
                    use_https=https, tls_ver=tls_version, log=_LOGGER)
    if not isy.connected:
        return False

    _categorize_nodes(hass, isy.nodes, ignore_identifier, sensor_identifier)
    _categorize_programs(hass, isy.programs)

    if enable_climate and isy.configuration.get('Weather Information'):
        _categorize_weather(hass, isy.climate)

    def stop(event: object) -> None:
        """Stop ISY auto updates."""
        isy.auto_update = False

    # Listen for HA stop to disconnect.
    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop)

    # Load platforms for the devices in the ISY controller that we support.
    for component in SUPPORTED_DOMAINS:
        discovery.load_platform(hass, component, DOMAIN, {}, config)

    isy.auto_update = True
    return True
def async_setup_platform(hass: HomeAssistantType, config: ConfigType,
                         async_add_devices, discovery_info=None):
    """Set up the WUnderground sensor.

    NOTE(review): ``yield from`` in a plain ``def`` -- generator-based
    coroutine; decorator presumably applied outside this view.
    """
    latitude = config.get(CONF_LATITUDE, hass.config.latitude)
    longitude = config.get(CONF_LONGITUDE, hass.config.longitude)

    rest = WUndergroundData(
        hass, config.get(CONF_API_KEY), config.get(CONF_PWS_ID),
        config.get(CONF_LANG), latitude, longitude)
    sensors = []
    for variable in config[CONF_MONITORED_CONDITIONS]:
        sensors.append(WUndergroundSensor(hass, rest, variable))

    # First fetch; abort platform setup when the API returned nothing.
    yield from rest.async_update()
    if not rest.data:
        raise PlatformNotReady

    async_add_devices(sensors, True)
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Setup the recorder."""
    global _INSTANCE  # pylint: disable=global-statement

    # The recorder is a singleton; refuse a second configuration.
    if _INSTANCE is not None:
        _LOGGER.error('Only a single instance allowed.')
        return False

    conf = config.get(DOMAIN, {})
    purge_days = conf.get(CONF_PURGE_DAYS)

    db_url = conf.get(CONF_DB_URL, None)
    if not db_url:
        # Default to the SQLite file in the config directory.
        db_url = DEFAULT_URL.format(
            hass_config_path=hass.config.path(DEFAULT_DB_FILE))

    _INSTANCE = Recorder(hass, purge_days=purge_days, uri=db_url)

    return True
def async_numeric_state_from_config(config: ConfigType,
                                    config_validation: bool = True) \
        -> Callable[..., bool]:
    """Wrap action method with state based condition."""
    if config_validation:
        config = cv.NUMERIC_STATE_CONDITION_SCHEMA(config)

    entity_id = config.get(CONF_ENTITY_ID)
    below = config.get(CONF_BELOW)
    above = config.get(CONF_ABOVE)
    value_template = config.get(CONF_VALUE_TEMPLATE)

    def check_numeric_state(hass: HomeAssistant,
                            variables: TemplateVarsType = None) -> bool:
        """Test numeric state condition."""
        if value_template is not None:
            # Templates need a hass reference before rendering.
            value_template.hass = hass

        return async_numeric_state(
            hass, entity_id, below, above, value_template, variables)

    return check_numeric_state
def setup_platform(hass: HomeAssistantType, config: ConfigType,
                   add_entities: Callable[[list], None],
                   discovery_info: Optional[dict] = None) -> bool:
    """Set up the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = str(config.get(CONF_DATA))
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)
    offset = config.get(CONF_OFFSET)
    include_tomorrow = config.get(CONF_TOMORROW)

    if not os.path.exists(gtfs_dir):
        os.makedirs(gtfs_dir)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found")
        return False

    import pygtfs

    (gtfs_root, _) = os.path.splitext(data)

    # check_same_thread=False: the sqlite connection is shared across HA
    # worker threads.
    sqlite_file = "{}.sqlite?check_same_thread=False".format(gtfs_root)
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    # pylint: disable=no-member
    if not gtfs.feeds:
        # First run: import the raw GTFS feed into the sqlite database.
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_entities([
        GTFSDepartureSensor(gtfs, name, origin, destination, offset,
                            include_tomorrow)])
    return True
async def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Set up the person component."""
    component = EntityComponent(_LOGGER, DOMAIN, hass)
    conf_persons = config.get(DOMAIN, [])
    manager = hass.data[DOMAIN] = PersonManager(hass, component, conf_persons)
    await manager.async_initialize()

    # Register the websocket API for managing persons from the frontend.
    for command in (ws_list_person, ws_create_person,
                    ws_update_person, ws_delete_person):
        websocket_api.async_register_command(hass, command)

    return True
async def _async_setup_platform(hass: HomeAssistantType, config: ConfigType,
                                async_add_entities, discovery_info):
    """Set up the cast platform.

    ``info`` stays None to add every discovered cast; otherwise only the
    requested host/port is added.
    """
    import pychromecast

    # Import CEC IGNORE attributes
    pychromecast.IGNORE_CEC += config.get(CONF_IGNORE_CEC, [])
    hass.data.setdefault(ADDED_CAST_DEVICES_KEY, set())
    hass.data.setdefault(KNOWN_CHROMECAST_INFO_KEY, set())

    info = None
    if discovery_info is not None:
        info = ChromecastInfo(host=discovery_info['host'],
                              port=discovery_info['port'])
    elif CONF_HOST in config:
        info = ChromecastInfo(host=config[CONF_HOST], port=DEFAULT_PORT)

    @callback
    def async_cast_discovered(discover: ChromecastInfo) -> None:
        """Handle discovery of a new chromecast."""
        if info is not None and info.host_port != discover.host_port:
            # Not our requested cast device.
            return

        cast_device = _async_create_cast_device(hass, discover)
        if cast_device is not None:
            async_add_entities([cast_device])

    remove_handler = async_dispatcher_connect(
        hass, SIGNAL_CAST_DISCOVERED, async_cast_discovered)
    # Re-play the callback for all past chromecasts, store the objects in
    # a list to avoid concurrent modification resulting in exception.
    for chromecast in list(hass.data[KNOWN_CHROMECAST_INFO_KEY]):
        async_cast_discovered(chromecast)

    if info is None or info.is_audio_group:
        # If we were a) explicitly told to enable discovery or
        # b) have an audio group cast device, we need internal discovery.
        hass.async_add_job(_setup_internal_discovery, hass)
    else:
        info = await hass.async_add_job(_fill_out_missing_chromecast_info,
                                        info)
        if info.friendly_name is None:
            _LOGGER.debug("Cannot retrieve detail information for chromecast"
                          " %s, the device may not be online", info)
            # Stop listening for discoveries; setup will be retried later.
            remove_handler()
            raise PlatformNotReady

        hass.async_add_job(_discover_chromecast, hass, info)
async def _async_setup_server(hass: HomeAssistantType, config: ConfigType):
    """Try to start embedded MQTT broker.

    This method is a coroutine. Returns the broker config tuple, or None
    on failure.
    """
    conf = config.get(DOMAIN, {})  # type: ConfigType

    success, broker_config = await server.async_start(
        hass, conf.get(CONF_PASSWORD), conf.get(CONF_EMBEDDED))

    # On failure the returned broker config is meaningless.
    return broker_config if success else None
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Start the MQTT protocol service."""
    conf = config.get(DOMAIN)  # type: Optional[ConfigType]

    # We need this because discovery can cause components to be set up and
    # otherwise it will not load the users config.
    # This needs a better solution.
    hass.data[DATA_MQTT_HASS_CONFIG] = config

    websocket_api.async_register_command(hass, websocket_subscribe)

    if conf is None:
        # If we have a config entry, setup is done by that config entry.
        # If there is no config entry, this should fail.
        return bool(hass.config_entries.async_entries(DOMAIN))

    conf = dict(conf)

    if CONF_EMBEDDED in conf or CONF_BROKER not in conf:
        # Embedded broker requested explicitly, or no external broker
        # configured -> start the embedded broker.
        broker_config = await _async_setup_server(hass, config)

        if broker_config is None:
            _LOGGER.error("Unable to start embedded MQTT broker")
            return False

        # Map the broker's positional result tuple onto the regular
        # broker configuration keys.
        conf.update({
            CONF_BROKER: broker_config[0],
            CONF_PORT: broker_config[1],
            CONF_USERNAME: broker_config[2],
            CONF_PASSWORD: broker_config[3],
            CONF_CERTIFICATE: broker_config[4],
            CONF_PROTOCOL: broker_config[5],
            CONF_CLIENT_KEY: None,
            CONF_CLIENT_CERT: None,
            CONF_TLS_INSECURE: None,
        })

    hass.data[DATA_MQTT_CONFIG] = conf

    # Only import if we haven't before.
    if not hass.config_entries.async_entries(DOMAIN):
        hass.async_create_task(hass.config_entries.flow.async_init(
            DOMAIN, context={'source': config_entries.SOURCE_IMPORT},
            data={}
        ))

    return True
def async_template_from_config(config: ConfigType,
                               config_validation: bool = True) \
        -> Callable[..., bool]:
    """Wrap action method with state based condition."""
    if config_validation:
        config = cv.TEMPLATE_CONDITION_SCHEMA(config)

    value_template = cast(Template, config.get(CONF_VALUE_TEMPLATE))

    def check_template(hass: HomeAssistant,
                       variables: TemplateVarsType = None) -> bool:
        """Validate template based if-condition."""
        # Templates need a hass reference before rendering.
        value_template.hass = hass
        return async_template(hass, value_template, variables)

    return check_template
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the HomematicIP Cloud component."""
    hass.data[DOMAIN] = {}

    for conf in config.get(DOMAIN, []):
        if conf[CONF_ACCESSPOINT] in configured_haps(hass):
            # Access point already imported; skip the duplicate.
            continue
        hass.async_add_job(hass.config_entries.flow.async_init(
            DOMAIN,
            context={'source': config_entries.SOURCE_IMPORT},
            data={
                HMIPC_HAPID: conf[CONF_ACCESSPOINT],
                HMIPC_AUTHTOKEN: conf[CONF_AUTHTOKEN],
                HMIPC_NAME: conf[CONF_NAME],
            }
        ))

    return True
def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Setup the recorder.

    NOTE(review): ``yield from`` in a plain ``def`` -- generator-based
    coroutine; decorator presumably applied outside this view.
    """
    conf = config.get(DOMAIN, {})
    purge_days = conf.get(CONF_PURGE_DAYS)

    db_url = conf.get(CONF_DB_URL, None)
    if not db_url:
        # Default to the SQLite file in the config directory.
        db_url = DEFAULT_URL.format(
            hass_config_path=hass.config.path(DEFAULT_DB_FILE))

    include = conf.get(CONF_INCLUDE, {})
    exclude = conf.get(CONF_EXCLUDE, {})
    instance = hass.data[DATA_INSTANCE] = Recorder(
        hass, purge_days=purge_days, uri=db_url, include=include,
        exclude=exclude)
    instance.async_initialize()
    instance.start()

    # Wait for the recorder thread to signal the database is ready.
    return (yield from instance.async_db_ready)
async def _async_setup_discovery(hass: HomeAssistantType,
                                 config: ConfigType) -> bool:
    """Try to start the discovery of MQTT devices.

    This method is a coroutine.
    """
    conf = config.get(DOMAIN, {})  # type: ConfigType

    discovery = await async_prepare_setup_platform(
        hass, config, DOMAIN, 'discovery')
    if discovery is None:
        _LOGGER.error("Unable to load MQTT discovery")
        return False

    # Discovery listens on the configured topic prefix.
    return await discovery.async_start(
        hass, conf[CONF_DISCOVERY_PREFIX], config)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Start the KNX integration."""
    hass.data[DATA_HASS_CONFIG] = config
    conf: ConfigType | None = config.get(DOMAIN)

    has_entries = bool(hass.config_entries.async_entries(DOMAIN))

    if conf is None:
        # If we have a config entry, setup is done by that config entry.
        # If there is no config entry, this should fail.
        return has_entries

    conf = dict(conf)
    hass.data[DATA_KNX_CONFIG] = conf

    # Only import if we haven't before.
    if not has_entries:
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, context={"source": config_entries.SOURCE_IMPORT},
                data=conf))

    return True
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the openSenseMap air quality platform."""
    station_id = config[CONF_STATION_ID]

    session = async_get_clientsession(hass)
    osm_api = OpenSenseMapData(OpenSenseMap(station_id, session))
    await osm_api.async_update()

    if "name" not in osm_api.api.data:
        _LOGGER.error("Station %s is not available", station_id)
        raise PlatformNotReady

    # Fall back to the station's reported name when none is configured.
    name = config.get(CONF_NAME)
    station_name = name if name is not None else osm_api.api.data["name"]

    async_add_entities([OpenSenseMapQuality(station_name, osm_api)], True)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the isy994 integration from YAML."""
    isy_config: Optional[ConfigType] = config.get(DOMAIN)
    hass.data.setdefault(DOMAIN, {})

    if not isy_config:
        return True

    config_entry = _async_find_matching_config_entry(hass)
    if config_entry:
        # Already imported: keep the entry in sync with the YAML source.
        hass.config_entries.async_update_entry(
            config_entry, data=dict(isy_config))
        return True

    # Only import if we haven't before.
    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=dict(isy_config),
        )
    )
    return True
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Hisense AEH-W4A1 integration."""
    conf = config.get(DOMAIN)
    hass.data[DOMAIN] = {}
    if conf is not None:
        # Iterate over a copy so unreachable entries can be removed from
        # the original list while looping.
        devices = conf[CONF_IP_ADDRESS][:]
        for device in devices:
            try:
                await AehW4a1(device).check()
            except pyaehw4a1.exceptions.ConnectionError:
                conf[CONF_IP_ADDRESS].remove(device)
                _LOGGER.warning("Hisense AEH-W4A1 at %s not found", device)
        # Only import a config entry if at least one device is reachable.
        if conf[CONF_IP_ADDRESS]:
            hass.data[DOMAIN] = conf
            hass.async_create_task(
                hass.config_entries.flow.async_init(
                    DOMAIN,
                    context={"source": config_entries.SOURCE_IMPORT},
                )
            )
    return True
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Pushbullet Sensor platform."""
    try:
        pushbullet = PushBullet(config.get(CONF_API_KEY))
    except InvalidKeyError:
        _LOGGER.error("Wrong API key for Pushbullet supplied")
        return

    pbprovider = PushBulletNotificationProvider(pushbullet)
    monitored_conditions = config[CONF_MONITORED_CONDITIONS]

    # Create a sensor for every monitored sensor type.
    entities = []
    for description in SENSOR_TYPES:
        if description.key in monitored_conditions:
            entities.append(
                PushBulletNotificationSensor(pbprovider, description))
    add_entities(entities)
async def async_step_import(self, import_config: ConfigType) -> FlowResult:
    """Handle the initial step."""
    host = import_config.get(CONF_HOST)
    port = import_config[CONF_PORT]

    # Abort early if this host/port combination is already configured.
    status = self._abort_if_host_port_configured(port, host, import_config)
    if status is not None:
        return status
    try:
        info = await _validate_dsmr_connection(self.hass, import_config)
    except CannotConnect:
        return self.async_abort(reason="cannot_connect")
    except CannotCommunicate:
        return self.async_abort(reason="cannot_communicate")

    # Network setups are titled host:port; serial setups just the port.
    name = f"{host}:{port}" if host is not None else port

    data = {**import_config, **info}

    # The meter's serial id uniquely identifies the entry.
    await self.async_set_unique_id(info[CONF_SERIAL_ID])
    self._abort_if_unique_id_configured(data)

    return self.async_create_entry(title=name, data=data)
def create_local_device(self, device_cfg: ConfigType) -> Device:
    """
    Create device with local connector.

    :param device_cfg: Configuration from which to create the device
    :return: Device object
    :raises Exception: when neither the config nor the protocol provides
        a port for the connection.
    """
    # Lazy %-style arguments: the original interpolated eagerly with
    # ``%``, building the message even when debug logging is disabled.
    _LOGGER.debug('Creating device via get_add_device with config: %s',
                  device_cfg)
    protocol_id = device_cfg.get(CONF_PROTOCOL)
    protocol = SUPPORTED_PROTOCOLS[protocol_id]

    # Fall back to the protocol's default port when none is configured.
    connect_port = device_cfg.get(CONF_PORT, protocol.get(PROTOCOL_PORT))
    if connect_port is None:
        raise Exception(
            'Protocol "%s" for device with ID "%s" does not provide default port. Please, '
            'configure port manually.' % (protocol_id,
                                          device_cfg.get(CONF_DEVICE_ID)))

    connector = LocalConnector(
        host=device_cfg.get(CONF_HOST),
        port=connect_port,
        application_id=device_cfg.get(CONF_APPLICATION_ID,
                                      DEFAULT_APPLICATION_ID),
    )
    connector.timeout = device_cfg.get(CONF_TIMEOUT, DEFAULT_TIMEOUT)

    device_id = device_cfg[CONF_DEVICE_ID]
    device = Device(device_id=device_id,
                    control_key=device_cfg.get(CONF_CONTROL_KEY),
                    protocol=protocol[PROTOCOL_DEFINITION])
    device.connector = connector

    # Give the device a readable default name if the user didn't set one.
    if CONF_NAME not in device_cfg:
        device_cfg[CONF_NAME] = DEFAULT_NAME_DEVICE.format(
            protocol_name=protocol.get(PROTOCOL_NAME, protocol_id),
            device_id=device_id)

    self.add_device(device, device_cfg)
    return device
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Activate the Alexa component."""
    if DOMAIN not in config:
        return True

    config = config[DOMAIN]

    intent.async_setup(hass)

    flash_briefings_config = config.get(CONF_FLASH_BRIEFINGS)
    if flash_briefings_config:
        flash_briefings.async_setup(hass, flash_briefings_config)

    if CONF_SMART_HOME in config:
        # An explicit None/empty value still enables smart home with the
        # schema defaults.
        smart_home_config = config[CONF_SMART_HOME] or SMART_HOME_SCHEMA({})
        await smart_home_http.async_setup(hass, smart_home_config)

    return True
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Withings component."""
    conf = config.get(DOMAIN, {})
    if not conf:
        return True

    # Make the config available to the oauth2 config flow.
    hass.data[DOMAIN] = {const.CONFIG: conf}

    # Setup the oauth2 config flow.
    implementation = WithingsLocalOAuth2Implementation(
        hass,
        const.DOMAIN,
        conf[CONF_CLIENT_ID],
        conf[CONF_CLIENT_SECRET],
        f"{WithingsAuth.URL}/oauth2_user/authorize2",
        f"{WithingsAuth.URL}/oauth2/token",
    )
    config_flow.WithingsFlowHandler.async_register_implementation(
        hass, implementation)

    return True
async def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Set up the Withings component."""
    conf = config.get(DOMAIN)
    if not conf:
        return True

    hass.data[DOMAIN] = {const.CONFIG: conf}

    base_url = conf.get(const.BASE_URL, hass.config.api.base_url).rstrip('/')

    # We don't pull default values from conf because the config
    # schema would have validated it for us.
    for profile in conf.get(const.PROFILES):
        config_flow.register_flow_implementation(hass,
                                                 conf.get(const.CLIENT_ID),
                                                 conf.get(const.CLIENT_SECRET),
                                                 base_url,
                                                 profile)

        # Start one user flow per configured profile.
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN,
                context={'source': const.SOURCE_USER},
                data={}))

    return True
def setup_scanner_platform(hass: HomeAssistantType, config: ConfigType,
                           scanner: Any, see_device: Callable):
    """Helper method to connect scanner-based platform to device tracker."""
    interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)

    # MACs we have already reported: the host name is looked up only the
    # first time a device is seen.
    seen = set()  # type: Any

    def device_tracker_scan(now: dt_util.dt.datetime):
        """Called when interval matches."""
        for mac in scanner.scan_devices():
            host_name = None
            if mac not in seen:
                host_name = scanner.get_device_name(mac)
                seen.add(mac)
            see_device(mac=mac, host_name=host_name)

    # Schedule periodic scans on a fixed second-of-minute grid, then run
    # one immediate initial scan.
    track_utc_time_change(hass, device_tracker_scan,
                          second=range(0, 60, interval))

    device_tracker_scan(None)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Set up the HomematicIP Cloud component."""
    hass.data[DOMAIN] = {}

    for conf in config.get(DOMAIN, []):
        # Skip access points that already have a config entry.
        known_hapids = {
            entry.data[HMIPC_HAPID]
            for entry in hass.config_entries.async_entries(DOMAIN)
        }
        if conf[CONF_ACCESSPOINT] in known_hapids:
            continue

        hass.async_add_job(
            hass.config_entries.flow.async_init(
                DOMAIN,
                context={"source": config_entries.SOURCE_IMPORT},
                data={
                    HMIPC_HAPID: conf[CONF_ACCESSPOINT],
                    HMIPC_AUTHTOKEN: conf[CONF_AUTHTOKEN],
                    HMIPC_NAME: conf[CONF_NAME],
                },
            ))
    return True
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Start the MQTT protocol service."""
    conf: ConfigType | None = config.get(DOMAIN)

    # Register websocket commands and the debug helpers unconditionally.
    for command in (websocket_subscribe, websocket_remove_device,
                    websocket_mqtt_info):
        websocket_api.async_register_command(hass, command)
    debug_info.initialize(hass)

    if conf:
        # Store a copy so later mutation doesn't touch the original config.
        hass.data[DATA_MQTT_CONFIG] = dict(conf)

    # Start a discovery flow when no config entry exists yet.
    if not hass.config_entries.async_entries(DOMAIN):
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN,
                context={
                    "source": config_entries.SOURCE_INTEGRATION_DISCOVERY
                },
                data={},
            ))
    return True
def __init__(self, hass, config: ConfigType, see) -> None:
    """Initialize the scanner."""
    self.see = see
    self.username = config[CONF_USERNAME]
    self.max_gps_accuracy = config[CONF_MAX_GPS_ACCURACY]
    # Default to a 60-second interval when none was configured.
    self.scan_interval = config.get(CONF_SCAN_INTERVAL) or timedelta(seconds=60)
    self._prev_seen: dict[str, str] = {}

    # Per-user credentials/cookie file.
    credfile = f"{hass.config.path(CREDENTIALS_FILE)}.{slugify(self.username)}"
    try:
        self.service = Service(credfile, self.username)
        self._update_info()
        track_time_interval(hass, self._update_info, self.scan_interval)
    except InvalidCookies:
        _LOGGER.error(
            "The cookie file provided does not provide a valid session. Please create another one and try again"
        )
        self.success_init = False
    else:
        self.success_init = True
def setup_platform(
    hass: HomeAssistantType,
    config: ConfigType,
    add_entities: Callable,
    discovery_info: Optional[dict] = None,
):
    """Set up a Hisense TV."""
    # Build the single configured TV entity straight from the config.
    entity = HisenseTvEntity(
        host=config[CONF_HOST],
        mac=config[CONF_MAC],
        name=config[CONF_NAME],
        broadcast_address=config.get(CONF_BROADCAST_ADDRESS),
    )
    add_entities([entity], True)
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: entity_platform.AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Import yaml config and initiates config flow for Switchbot devices."""
    # Check if entry config exists and skips import if it does.
    if hass.config_entries.async_entries(DOMAIN):
        return

    import_data = {
        CONF_NAME: config[CONF_NAME],
        CONF_PASSWORD: config.get(CONF_PASSWORD),
        # Normalize the MAC to colon-separated lowercase form.
        CONF_MAC: config[CONF_MAC].replace("-", ":").lower(),
        CONF_SENSOR_TYPE: ATTR_BOT,
    }
    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_IMPORT}, data=import_data
        ))
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the requested World Air Quality Index locations."""
    client = WaqiClient(config[CONF_TOKEN], async_get_clientsession(hass),
                        timeout=TIMEOUT)
    # An empty/absent filter means "keep every discovered station".
    station_filter = set(config.get(CONF_STATIONS) or [])

    entities = []
    try:
        for location_name in config[CONF_LOCATIONS]:
            stations = await client.search(location_name)
            _LOGGER.debug("The following stations were returned: %s", stations)
            for station in stations:
                sensor = WaqiSensor(client, station)
                # Keep the sensor when any of its identifiers matches
                # the configured filter (or no filter is set).
                identifiers = {sensor.uid, sensor.url, sensor.station_name}
                if not station_filter or identifiers & station_filter:
                    entities.append(sensor)
    except (
        aiohttp.client_exceptions.ClientConnectorError,
        asyncio.TimeoutError,
    ) as err:
        _LOGGER.exception("Failed to connect to WAQI servers")
        raise PlatformNotReady from err
    async_add_entities(entities, True)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the sensors.

    Login to the bank and get a list of existing accounts. Create a
    sensor for each account.
    """
    credentials = BankCredentials(
        config[CONF_BIN], config[CONF_USERNAME], config[CONF_PIN], config[CONF_URL]
    )
    # Fall back to the bank identifier when no friendly name is given.
    fints_name = config.get(CONF_NAME, config[CONF_BIN])

    # Map account number -> configured display name.
    account_config = {
        acc[CONF_ACCOUNT]: acc[CONF_NAME] for acc in config[CONF_ACCOUNTS]
    }

    holdings_config = {
        acc[CONF_ACCOUNT]: acc[CONF_NAME] for acc in config[CONF_HOLDINGS]
    }

    client = FinTsClient(credentials, fints_name)
    balance_accounts, holdings_accounts = client.detect_accounts()
    accounts: list[SensorEntity] = []

    for account in balance_accounts:
        # When accounts are listed explicitly, only those configured
        # become sensors; everything else is skipped with an info log.
        if config[CONF_ACCOUNTS] and account.iban not in account_config:
            _LOGGER.info("Skipping account %s for bank %s", account.iban, fints_name)
            continue

        if not (account_name := account_config.get(account.iban)):
            account_name = f"{fints_name} - {account.iban}"
        accounts.append(FinTsAccount(client, account, account_name))
        _LOGGER.debug("Creating account %s for bank %s", account.iban, fints_name)
    # NOTE(review): `holdings_config`, `holdings_accounts` and `accounts`
    # are unused at this point — the function presumably continues beyond
    # this view (holdings sensors + add_entities call); confirm upstream.
async def _async_process_config(hass: HomeAssistant, config: ConfigType) -> None:
    """Process group configuration."""
    hass.data.setdefault(GROUP_ORDER, 0)

    entities = []
    domain_config: dict[str, dict[str, Any]] = config.get(DOMAIN, {})

    for object_id, conf in domain_config.items():
        # We keep track of the order when we are creating the tasks
        # in the same way that async_create_group does to make
        # sure we use the same ordering system. This overcomes
        # the problem with concurrently creating the groups
        entities.append(
            Group.async_create_group_entity(
                hass,
                conf.get(CONF_NAME, object_id),
                conf.get(CONF_ENTITIES) or [],
                icon=conf.get(CONF_ICON),
                object_id=object_id,
                mode=bool(conf.get(CONF_ALL)),
                order=hass.data[GROUP_ORDER],
            )
        )

        # Keep track of the group order without iterating
        # every state in the state machine every time
        # we setup a new group
        hass.data[GROUP_ORDER] += 1

    # If called before the platform async_setup is called (test cases)
    await _async_get_component(hass).async_add_entities(entities)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up cover controlled by shell commands."""
    setup_reload_service(hass, DOMAIN, PLATFORMS)

    covers = []
    for device_name, device_config in config.get(CONF_COVERS, {}).items():
        # Attach hass to the optional state value template so it renders.
        if (value_template := device_config.get(CONF_VALUE_TEMPLATE)) is not None:
            value_template.hass = hass

        covers.append(
            CommandCover(
                hass,
                device_config.get(CONF_FRIENDLY_NAME, device_name),
                device_config[CONF_COMMAND_OPEN],
                device_config[CONF_COMMAND_CLOSE],
                device_config[CONF_COMMAND_STOP],
                device_config.get(CONF_COMMAND_STATE),
                value_template,
                device_config[CONF_COMMAND_TIMEOUT],
                device_config.get(CONF_UNIQUE_ID),
            )
        )

    if not covers:
        _LOGGER.error("No covers added")
        return

    add_entities(covers)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the FFmpeg component."""
    conf = config.get(DOMAIN, {})

    manager = FFmpegManager(hass, conf.get(CONF_FFMPEG_BIN, DEFAULT_BINARY))
    await manager.async_get_version()

    # Map service names to the dispatcher signal they should send; any
    # other service (i.e. restart) falls back to the restart signal.
    signals = {
        SERVICE_START: SIGNAL_FFMPEG_START,
        SERVICE_STOP: SIGNAL_FFMPEG_STOP,
    }

    async def async_service_handle(service: ServiceCall) -> None:
        """Handle service ffmpeg process."""
        entity_ids = service.data.get(ATTR_ENTITY_ID)
        signal = signals.get(service.service, SIGNAL_FFMPEG_RESTART)
        async_dispatcher_send(hass, signal, entity_ids)

    for service_name in (SERVICE_START, SERVICE_STOP, SERVICE_RESTART):
        hass.services.async_register(DOMAIN, service_name,
                                     async_service_handle,
                                     schema=SERVICE_FFMPEG_SCHEMA)

    hass.data[DATA_FFMPEG] = manager
    return True
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: Callable,
    discovery_info: Any | None = None,
) -> None:
    """Set up the system monitor sensors."""
    entities = []
    sensor_registry: dict[str, SensorData] = {}

    for resource in config[CONF_RESOURCES]:
        type_ = resource[CONF_TYPE]

        # Initialize the sensor argument if none was provided.
        # For disk monitoring default to "/" (root) to prevent runtime
        # errors, if argument was not specified.
        if CONF_ARG in resource:
            argument = resource[CONF_ARG]
        elif type_.startswith("disk_"):
            argument = "/"
        else:
            argument = ""

        # Verify if we can retrieve CPU / processor temperatures.
        # If not, do not create the entity and add a warning to the log
        if (type_ == "processor_temperature"
                and await hass.async_add_executor_job(_read_cpu_temperature)
                is None):
            _LOGGER.warning(
                "Cannot read CPU / processor temperature information")
            continue

        sensor_registry[type_] = SensorData(argument, None, None, None, None)
        entities.append(SystemMonitorSensor(sensor_registry, type_, argument))

    scan_interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
    await async_setup_sensor_registry_updates(hass, sensor_registry,
                                              scan_interval)
    async_add_entities(entities)
async def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Set up the remote_homeassistant component."""
    hass.data.setdefault(DOMAIN, {})

    async def _handle_reload(service):
        """Handle reload service call."""
        # Re-read YAML; bail out if the integration section vanished.
        reloaded = await async_integration_yaml_config(hass, DOMAIN)
        if not reloaded or DOMAIN not in reloaded:
            return

        entries_by_id = {
            entry.unique_id: entry
            for entry in hass.config_entries.async_entries(DOMAIN)
        }

        # Update all YAML-sourced entries concurrently.
        await asyncio.gather(*(
            _async_update_config_entry_if_from_yaml(hass, entries_by_id,
                                                    instance)
            for instance in reloaded[DOMAIN][CONF_INSTANCES]
        ))

    hass.helpers.service.async_register_admin_service(
        DOMAIN,
        SERVICE_RELOAD,
        _handle_reload,
    )

    # Start an import flow for every instance configured via YAML.
    for instance in config.get(DOMAIN, {}).get(CONF_INSTANCES, []):
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, context={"source": SOURCE_IMPORT}, data=instance
            )
        )
    return True
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Start the MQTT protocol service."""
    conf: ConfigType | None = config.get(DOMAIN)

    websocket_api.async_register_command(hass, websocket_subscribe)
    websocket_api.async_register_command(hass, websocket_mqtt_info)
    debug_info.initialize(hass)

    if conf:
        # Store a copy so later mutation doesn't affect the original dict.
        hass.data[DATA_MQTT_CONFIG] = dict(conf)

    # Create an import flow if the user has yaml configured entities etc.
    # but no broker configuration. Note: The intention is not for this to
    # import broker configuration from YAML because that has been deprecated.
    if not hass.config_entries.async_entries(DOMAIN):
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN,
                context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
                data={},
            )
        )
    return True
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Huawei LTE component.""" # dicttoxml (used by huawei-lte-api) has uselessly verbose INFO level. # https://github.com/quandyfactory/dicttoxml/issues/60 logging.getLogger("dicttoxml").setLevel(logging.WARNING) # Arrange our YAML config to dict with normalized URLs as keys domain_config: dict[str, dict[str, Any]] = {} if DOMAIN not in hass.data: hass.data[DOMAIN] = HuaweiLteData(hass_config=config, config=domain_config) for router_config in config.get(DOMAIN, []): domain_config[url_normalize( router_config.pop(CONF_URL))] = router_config def service_handler(service: ServiceCall) -> None: """Apply a service.""" routers = hass.data[DOMAIN].routers if url := service.data.get(CONF_URL): router = routers.get(url) elif not routers: _LOGGER.error("%s: no routers configured", service.service) return
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Find and return switches controlled by telnet commands."""
    switches = []
    for object_id, device_config in config.get(CONF_SWITCHES, {}).items():
        # Attach hass to the optional state value template so it renders.
        if (value_template := device_config.get(CONF_VALUE_TEMPLATE)) is not None:
            value_template.hass = hass

        switches.append(
            TelnetSwitch(
                hass,
                object_id,
                device_config.get(CONF_RESOURCE),
                device_config.get(CONF_PORT),
                device_config.get(CONF_NAME, object_id),
                device_config.get(CONF_COMMAND_ON),
                device_config.get(CONF_COMMAND_OFF),
                device_config.get(CONF_COMMAND_STATE),
                value_template,
                device_config.get(CONF_TIMEOUT),
            )
        )

    if not switches:
        _LOGGER.error("No switches added")
        return

    add_entities(switches)
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Push Camera platform."""
    # Shared storage for push cameras, created on first use.
    hass.data.setdefault(PUSH_CAMERA_DATA, {})

    camera = PushCamera(
        hass,
        config[CONF_NAME],
        config[CONF_BUFFER_SIZE],
        config[CONF_TIMEOUT],
        config[CONF_IMAGE_FIELD],
        config.get(CONF_WEBHOOK_ID),
    )
    async_add_entities([camera])
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Xiaomi TV platform."""
    # If a hostname is set. Discovery is skipped.
    host = config.get(CONF_HOST)
    name = config.get(CONF_NAME)

    if host is None:
        # Otherwise, discover TVs on network.
        add_entities(
            XiaomiTV(tv, DEFAULT_NAME) for tv in pymitv.Discover().scan())
        return

    # Check if there's a valid TV at the IP address.
    if pymitv.Discover().check_ip(host):
        # Register TV with Home Assistant.
        add_entities([XiaomiTV(host, name)])
    else:
        _LOGGER.error("Could not find Xiaomi TV with specified IP: %s", host)
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Web scrape sensor."""
    _LOGGER.warning(
        # Config flow added in Home Assistant Core 2022.7, remove import flow in 2022.9
        "Loading Scrape via platform setup has been deprecated in Home Assistant 2022.7 "
        "Your configuration has been automatically imported and you can "
        "remove it from your configuration.yaml")

    if config.get(CONF_VALUE_TEMPLATE):
        # Validate the template, then keep only its source text so the
        # config stays serializable for the import flow.
        template = Template(config[CONF_VALUE_TEMPLATE])
        template.ensure_valid()
        config[CONF_VALUE_TEMPLATE] = template.template

    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": SOURCE_IMPORT},
            data=config,
        )
    )
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities_callback: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up Kankun Wifi switches."""
    # Build one switch entity per configured device.
    devices = [
        KankunSwitch(
            hass,
            properties.get(CONF_NAME, dev_name),
            properties.get(CONF_HOST),
            properties.get(CONF_PORT, DEFAULT_PORT),
            properties.get(CONF_PATH, DEFAULT_PATH),
            properties.get(CONF_USERNAME),
            properties.get(CONF_PASSWORD),
        )
        for dev_name, properties in config.get("switches", {}).items()
    ]
    add_entities_callback(devices)