async def async_setup(hass: HomeAssistantType, config: ConfigType): """Register a port mapping for Home Assistant via UPnP.""" await async_ensure_domain_data(hass) # ensure sane config if DOMAIN not in config: return True upnp_config = config[DOMAIN] # overridden local ip if CONF_LOCAL_IP in upnp_config: hass.data[DOMAIN]['local_ip'] = upnp_config[CONF_LOCAL_IP] # determine ports ports = {CONF_HASS: CONF_HASS} # default, port_mapping disabled by default if CONF_PORTS in upnp_config: # copy from config ports = upnp_config[CONF_PORTS] hass.data[DOMAIN]['auto_config'] = { 'active': True, 'enable_sensors': upnp_config[CONF_ENABLE_SENSORS], 'enable_port_mapping': upnp_config[CONF_ENABLE_PORT_MAPPING], 'ports': ports, } return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigType) -> bool: """Set up Toon from a config entry.""" from toonapilib import Toon conf = hass.data.get(DATA_TOON_CONFIG) toon = await hass.async_add_executor_job(partial( Toon, entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], conf[CONF_CLIENT_ID], conf[CONF_CLIENT_SECRET], tenant_id=entry.data[CONF_TENANT], display_common_name=entry.data[CONF_DISPLAY])) hass.data.setdefault(DATA_TOON_CLIENT, {})[entry.entry_id] = toon # Register device for the Meter Adapter, since it will have no entities. device_registry = await dr.async_get_registry(hass) device_registry.async_get_or_create( config_entry_id=entry.entry_id, identifiers={ (DOMAIN, toon.agreement.id, 'meter_adapter'), }, manufacturer='Eneco', name="Meter Adapter", via_hub=(DOMAIN, toon.agreement.id) ) for component in 'binary_sensor', 'climate', 'sensor': hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component)) return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry): """Set up Point from a config entry.""" from pypoint import PointSession def token_saver(token): _LOGGER.debug('Saving updated token') entry.data[CONF_TOKEN] = token hass.config_entries.async_update_entry(entry, data={**entry.data}) # Force token update. entry.data[CONF_TOKEN]['expires_in'] = -1 session = PointSession( entry.data['refresh_args']['client_id'], token=entry.data[CONF_TOKEN], auto_refresh_kwargs=entry.data['refresh_args'], token_saver=token_saver, ) if not session.is_authorized: _LOGGER.error('Authentication Error') return False hass.data[DATA_CONFIG_ENTRY_LOCK] = asyncio.Lock() hass.data[CONFIG_ENTRY_IS_SETUP] = set() await async_setup_webhook(hass, entry, session) client = MinutPointClient(hass, entry, session) hass.data.setdefault(DOMAIN, {}).update({entry.entry_id: client}) await client.update() return True
def async_publish(hass: HomeAssistantType, topic: Any, payload, qos=None,
                  retain=None) -> None:
    """Publish message to an MQTT topic."""
    data = _build_publish_data(topic, qos, retain)
    data[ATTR_PAYLOAD] = payload
    hass.async_create_task(
        hass.services.async_call(DOMAIN, SERVICE_PUBLISH, data))
async def async_migrate_entry(hass: HomeAssistantType, entry: ConfigEntry): """Handle migration of a previous version config entry. A config entry created under a previous version must go through the integration setup again so we can properly retrieve the needed data elements. Force this by removing the entry and triggering a new flow. """ from pysmartthings import SmartThings # Remove the installed_app, which if already removed raises a 403 error. api = SmartThings(async_get_clientsession(hass), entry.data[CONF_ACCESS_TOKEN]) installed_app_id = entry.data[CONF_INSTALLED_APP_ID] try: await api.delete_installed_app(installed_app_id) except ClientResponseError as ex: if ex.status == 403: _LOGGER.exception("Installed app %s has already been removed", installed_app_id) else: raise _LOGGER.debug("Removed installed app %s", installed_app_id) # Delete the entry hass.async_create_task( hass.config_entries.async_remove(entry.entry_id)) # only create new flow if there isn't a pending one for SmartThings. flows = hass.config_entries.flow.async_progress() if not [flow for flow in flows if flow['handler'] == DOMAIN]: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={'source': 'import'})) # Return False because it could not be migrated. return False
def async_setup_scanner_platform(hass: HomeAssistantType, config: ConfigType,
                                 scanner: Any, async_see_device: Callable):
    """Helper method to connect scanner-based platform to device tracker.

    This method is a coroutine.
    """
    interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)

    # Initial scan of each mac we also tell about host name for config
    seen = set()  # type: Any

    def device_tracker_scan(now: dt_util.dt.datetime):
        """Called when interval matches."""
        found_devices = scanner.scan_devices()

        for mac in found_devices:
            if mac in seen:
                host_name = None
            else:
                host_name = scanner.get_device_name(mac)
                seen.add(mac)
            hass.add_job(async_see_device(mac=mac, host_name=host_name))

    async_track_utc_time_change(
        hass, device_tracker_scan, second=range(0, 60, interval))

    hass.async_add_job(device_tracker_scan, None)
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType, async_add_devices, discovery_info=None): """Set up the cast platform.""" import pychromecast # Import CEC IGNORE attributes pychromecast.IGNORE_CEC += config.get(CONF_IGNORE_CEC, []) hass.data.setdefault(ADDED_CAST_DEVICES_KEY, {}) hass.data.setdefault(KNOWN_CHROMECASTS_KEY, {}) # None -> use discovery; (host, port) -> manually specify chromecast. want_host = None if discovery_info: want_host = (discovery_info.get('host'), discovery_info.get('port')) elif CONF_HOST in config: want_host = (config.get(CONF_HOST), DEFAULT_PORT) enable_discovery = False if want_host is None: # We were explicitly told to enable pychromecast discovery. enable_discovery = True elif want_host[1] != DEFAULT_PORT: # We're trying to add a group, so we have to use pychromecast's # discovery to get the correct friendly name. enable_discovery = True if enable_discovery: @callback def async_cast_discovered(chromecast): """Callback for when a new chromecast is discovered.""" if want_host is not None and \ (chromecast.host, chromecast.port) != want_host: return # for groups, only add requested device cast_device = _async_create_cast_device(hass, chromecast) if cast_device is not None: async_add_devices([cast_device]) async_dispatcher_connect(hass, SIGNAL_CAST_DISCOVERED, async_cast_discovered) # Re-play the callback for all past chromecasts, store the objects in # a list to avoid concurrent modification resulting in exception. for chromecast in list(hass.data[KNOWN_CHROMECASTS_KEY].values()): async_cast_discovered(chromecast) hass.async_add_job(_setup_internal_discovery, hass) else: # Manually add a "normal" Chromecast, we can do that without discovery. try: chromecast = await hass.async_add_job( pychromecast.Chromecast, *want_host) except pychromecast.ChromecastConnectionError as err: _LOGGER.warning("Can't set up chromecast on %s: %s", want_host[0], err) raise PlatformNotReady key = (chromecast.host, chromecast.port, chromecast.uuid) cast_device = _async_create_cast_device(hass, chromecast) if cast_device is not None: hass.data[KNOWN_CHROMECASTS_KEY][key] = chromecast async_add_devices([cast_device])
def async_setup_scanner_platform(hass: HomeAssistantType, config: ConfigType, scanner: Any, async_see_device: Callable, platform: str): """Set up the connect scanner-based platform to device tracker. This method must be run in the event loop. """ interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) update_lock = asyncio.Lock(loop=hass.loop) scanner.hass = hass # Initial scan of each mac we also tell about host name for config seen = set() # type: Any @asyncio.coroutine def async_device_tracker_scan(now: dt_util.dt.datetime): """Handle interval matches.""" if update_lock.locked(): _LOGGER.warning( "Updating device list from %s took longer than the scheduled " "scan interval %s", platform, interval) return with (yield from update_lock): found_devices = yield from scanner.async_scan_devices() for mac in found_devices: if mac in seen: host_name = None else: host_name = yield from scanner.async_get_device_name(mac) seen.add(mac) try: extra_attributes = (yield from scanner.async_get_extra_attributes(mac)) except NotImplementedError: extra_attributes = dict() kwargs = { 'mac': mac, 'host_name': host_name, 'source_type': SOURCE_TYPE_ROUTER, 'attributes': { 'scanner': scanner.__class__.__name__, **extra_attributes } } zone_home = hass.states.get(zone.ENTITY_ID_HOME) if zone_home: kwargs['gps'] = [zone_home.attributes[ATTR_LATITUDE], zone_home.attributes[ATTR_LONGITUDE]] kwargs['gps_accuracy'] = 0 hass.async_add_job(async_see_device(**kwargs)) async_track_time_interval(hass, async_device_tracker_scan, interval) hass.async_add_job(async_device_tracker_scan(None))
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry): """Initialize config entry which represents the HEOS controller.""" from pyheos import Heos, CommandError host = entry.data[CONF_HOST] # Setting all_progress_events=False ensures that we only receive a # media position update upon start of playback or when media changes controller = Heos(host, all_progress_events=False) try: await controller.connect(auto_reconnect=True) # Auto reconnect only operates if initial connection was successful. except (asyncio.TimeoutError, ConnectionError, CommandError) as error: await controller.disconnect() _LOGGER.debug("Unable to connect to controller %s: %s", host, error) raise ConfigEntryNotReady # Disconnect when shutting down async def disconnect_controller(event): await controller.disconnect() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, disconnect_controller) # Get players and sources try: players = await controller.get_players() favorites = {} if controller.is_signed_in: favorites = await controller.get_favorites() else: _LOGGER.warning( "%s is not logged in to a HEOS account and will be unable " "to retrieve HEOS favorites: Use the 'heos.sign_in' service " "to sign-in to a HEOS account", host) inputs = await controller.get_input_sources() except (asyncio.TimeoutError, ConnectionError, CommandError) as error: await controller.disconnect() _LOGGER.debug("Unable to retrieve players and sources: %s", error, exc_info=isinstance(error, CommandError)) raise ConfigEntryNotReady controller_manager = ControllerManager(hass, controller) await controller_manager.connect_listeners() source_manager = SourceManager(favorites, inputs) source_manager.connect_update(hass, controller) hass.data[DOMAIN] = { DATA_CONTROLLER_MANAGER: controller_manager, DATA_SOURCE_MANAGER: source_manager, MEDIA_PLAYER_DOMAIN: players } services.register(hass, controller) hass.async_create_task(hass.config_entries.async_forward_entry_setup( entry, MEDIA_PLAYER_DOMAIN)) return True
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Start the MQTT protocol service.""" conf = config.get(DOMAIN) # type: Optional[ConfigType] # We need this because discovery can cause components to be set up and # otherwise it will not load the users config. # This needs a better solution. hass.data[DATA_MQTT_HASS_CONFIG] = config if conf is None: # If we have a config entry, setup is done by that config entry. # If there is no config entry, this should fail. return bool(hass.config_entries.async_entries(DOMAIN)) conf = dict(conf) if CONF_EMBEDDED in conf or CONF_BROKER not in conf: if (conf.get(CONF_PASSWORD) is None and config.get('http', {}).get('api_password') is not None): _LOGGER.error( "Starting from release 0.76, the embedded MQTT broker does not" " use api_password as default password anymore. Please set" " password configuration. See https://home-assistant.io/docs/" "mqtt/broker#embedded-broker for details") return False broker_config = await _async_setup_server(hass, config) if broker_config is None: _LOGGER.error("Unable to start embedded MQTT broker") return False conf.update({ CONF_BROKER: broker_config[0], CONF_PORT: broker_config[1], CONF_USERNAME: broker_config[2], CONF_PASSWORD: broker_config[3], CONF_CERTIFICATE: broker_config[4], CONF_PROTOCOL: broker_config[5], CONF_CLIENT_KEY: None, CONF_CLIENT_CERT: None, CONF_TLS_INSECURE: None, }) hass.data[DATA_MQTT_CONFIG] = conf # Only import if we haven't before. if not hass.config_entries.async_entries(DOMAIN): hass.async_create_task(hass.config_entries.flow.async_init( DOMAIN, context={'source': config_entries.SOURCE_IMPORT}, data={} )) return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Establish connection with Daikin."""
    conf = entry.data
    daikin_api = await daikin_api_setup(hass, conf[CONF_HOST])
    if not daikin_api:
        return False
    hass.data.setdefault(DOMAIN, {}).update({entry.entry_id: daikin_api})
    for component in COMPONENT_TYPES:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(
                entry, component))
    return True
async def async_setup_platform( hass: HomeAssistantType, config, async_add_entities, discovery_info=None): """Set up DLNA DMR platform.""" if config.get(CONF_URL) is not None: url = config[CONF_URL] name = config.get(CONF_NAME) elif discovery_info is not None: url = discovery_info['ssdp_description'] name = discovery_info.get('name') if DLNA_DMR_DATA not in hass.data: hass.data[DLNA_DMR_DATA] = {} if 'lock' not in hass.data[DLNA_DMR_DATA]: hass.data[DLNA_DMR_DATA]['lock'] = asyncio.Lock() # build upnp/aiohttp requester from async_upnp_client.aiohttp import AiohttpSessionRequester session = async_get_clientsession(hass) requester = AiohttpSessionRequester(session, True) # ensure event handler has been started with await hass.data[DLNA_DMR_DATA]['lock']: server_host = config.get(CONF_LISTEN_IP) if server_host is None: server_host = get_local_ip() server_port = config.get(CONF_LISTEN_PORT, DEFAULT_LISTEN_PORT) callback_url_override = config.get(CONF_CALLBACK_URL_OVERRIDE) event_handler = await async_start_event_handler( hass, server_host, server_port, requester, callback_url_override) # create upnp device from async_upnp_client import UpnpFactory factory = UpnpFactory(requester, disable_state_variable_validation=True) try: upnp_device = await factory.async_create_device(url) except (asyncio.TimeoutError, aiohttp.ClientError): raise PlatformNotReady() # wrap with DmrDevice from async_upnp_client.profiles.dlna import DmrDevice dlna_device = DmrDevice(upnp_device, event_handler) # create our own device device = DlnaDmrDevice(dlna_device, name) _LOGGER.debug("Adding device: %s", device) async_add_entities([device], True)
async def _async_setup_platform(hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info): """Set up the cast platform.""" import pychromecast # Import CEC IGNORE attributes pychromecast.IGNORE_CEC += config.get(CONF_IGNORE_CEC, []) hass.data.setdefault(ADDED_CAST_DEVICES_KEY, set()) hass.data.setdefault(KNOWN_CHROMECAST_INFO_KEY, set()) info = None if discovery_info is not None: info = ChromecastInfo(host=discovery_info['host'], port=discovery_info['port']) elif CONF_HOST in config: info = ChromecastInfo(host=config[CONF_HOST], port=DEFAULT_PORT) @callback def async_cast_discovered(discover: ChromecastInfo) -> None: """Handle discovery of a new chromecast.""" if info is not None and info.host_port != discover.host_port: # Not our requested cast device. return cast_device = _async_create_cast_device(hass, discover) if cast_device is not None: async_add_entities([cast_device]) remove_handler = async_dispatcher_connect( hass, SIGNAL_CAST_DISCOVERED, async_cast_discovered) # Re-play the callback for all past chromecasts, store the objects in # a list to avoid concurrent modification resulting in exception. for chromecast in list(hass.data[KNOWN_CHROMECAST_INFO_KEY]): async_cast_discovered(chromecast) if info is None or info.is_audio_group: # If we were a) explicitly told to enable discovery or # b) have an audio group cast device, we need internal discovery. hass.async_add_job(_setup_internal_discovery, hass) else: info = await hass.async_add_job(_fill_out_missing_chromecast_info, info) if info.friendly_name is None: _LOGGER.debug("Cannot retrieve detail information for chromecast" " %s, the device may not be online", info) remove_handler() raise PlatformNotReady hass.async_add_job(_discover_chromecast, hass, info)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Start the MQTT protocol service.""" conf = config.get(DOMAIN) # type: Optional[ConfigType] # We need this because discovery can cause components to be set up and # otherwise it will not load the users config. # This needs a better solution. hass.data[DATA_MQTT_HASS_CONFIG] = config websocket_api.async_register_command(hass, websocket_subscribe) if conf is None: # If we have a config entry, setup is done by that config entry. # If there is no config entry, this should fail. return bool(hass.config_entries.async_entries(DOMAIN)) conf = dict(conf) if CONF_EMBEDDED in conf or CONF_BROKER not in conf: broker_config = await _async_setup_server(hass, config) if broker_config is None: _LOGGER.error("Unable to start embedded MQTT broker") return False conf.update({ CONF_BROKER: broker_config[0], CONF_PORT: broker_config[1], CONF_USERNAME: broker_config[2], CONF_PASSWORD: broker_config[3], CONF_CERTIFICATE: broker_config[4], CONF_PROTOCOL: broker_config[5], CONF_CLIENT_KEY: None, CONF_CLIENT_CERT: None, CONF_TLS_INSECURE: None, }) hass.data[DATA_MQTT_CONFIG] = conf # Only import if we haven't before. if not hass.config_entries.async_entries(DOMAIN): hass.async_create_task(hass.config_entries.flow.async_init( DOMAIN, context={'source': config_entries.SOURCE_IMPORT}, data={} )) return True
def async_see(hass: HomeAssistantType, mac: str = None, dev_id: str = None,
              host_name: str = None, location_name: str = None,
              gps: GPSType = None, gps_accuracy=None, battery: int = None,
              attributes: dict = None):
    """Call service to notify you see device."""
    data = {key: value for key, value in
            ((ATTR_MAC, mac),
             (ATTR_DEV_ID, dev_id),
             (ATTR_HOST_NAME, host_name),
             (ATTR_LOCATION_NAME, location_name),
             (ATTR_GPS, gps),
             (ATTR_GPS_ACCURACY, gps_accuracy),
             (ATTR_BATTERY, battery)) if value is not None}
    if attributes:
        data[ATTR_ATTRIBUTES] = attributes
    hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_SEE, data))
def _setup_internal_discovery(hass: HomeAssistantType) -> None: """Set up the pychromecast internal discovery.""" if INTERNAL_DISCOVERY_RUNNING_KEY not in hass.data: hass.data[INTERNAL_DISCOVERY_RUNNING_KEY] = threading.Lock() if not hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].acquire(blocking=False): # Internal discovery is already running return import pychromecast def internal_callback(name): """Called when zeroconf has discovered a new chromecast.""" mdns = listener.services[name] _discover_chromecast(hass, ChromecastInfo(*mdns)) _LOGGER.debug("Starting internal pychromecast discovery.") listener, browser = pychromecast.start_discovery(internal_callback) def stop_discovery(event): """Stop discovery of new chromecasts.""" _LOGGER.debug("Stopping internal pychromecast discovery.") pychromecast.stop_discovery(browser) hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].release() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_discovery)
async def async_setup(hass: HomeAssistantType, config: ConfigType): """Set up the mobile app component.""" store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) app_config = await store.async_load() if app_config is None: app_config = { DATA_BINARY_SENSOR: {}, DATA_CONFIG_ENTRIES: {}, DATA_DELETED_IDS: [], DATA_DEVICES: {}, DATA_SENSOR: {} } hass.data[DOMAIN] = { DATA_BINARY_SENSOR: app_config.get(DATA_BINARY_SENSOR, {}), DATA_CONFIG_ENTRIES: {}, DATA_DELETED_IDS: app_config.get(DATA_DELETED_IDS, []), DATA_DEVICES: {}, DATA_SENSOR: app_config.get(DATA_SENSOR, {}), DATA_STORE: store, } hass.http.register_view(RegistrationsView()) register_websocket_handlers(hass) for deleted_id in hass.data[DOMAIN][DATA_DELETED_IDS]: try: webhook_register(hass, DOMAIN, "Deleted Webhook", deleted_id, handle_webhook) except ValueError: pass return True
async def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Set up UPnP component."""
    conf_default = CONFIG_SCHEMA({DOMAIN: {}})[DOMAIN]
    conf = config.get(DOMAIN, conf_default)
    local_ip = await hass.async_add_executor_job(get_local_ip)
    hass.data[DOMAIN] = {
        'config': conf,
        'devices': {},
        'local_ip': config.get(CONF_LOCAL_IP, local_ip),
        'ports': conf.get('ports', {}),
    }

    if conf is not None:
        hass.async_create_task(hass.config_entries.flow.async_init(
            DOMAIN, context={'source': config_entries.SOURCE_IMPORT}))

    return True
async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry): """Set up UPnP/IGD-device from a config entry.""" await async_ensure_domain_data(hass) data = config_entry.data # build UPnP/IGD device ssdp_description = data[CONF_SSDP_DESCRIPTION] try: device = await Device.async_create_device(hass, ssdp_description) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.error('Unable to create upnp-device') return False hass.data[DOMAIN]['devices'][device.udn] = device # port mapping if data.get(CONF_ENABLE_PORT_MAPPING): local_ip = hass.data[DOMAIN]['local_ip'] ports = hass.data[DOMAIN]['auto_config']['ports'] _LOGGER.debug('Enabling port mappings: %s', ports) hass_port = None if hasattr(hass, 'http'): hass_port = hass.http.server_port ports = _substitute_hass_ports(ports, hass_port=hass_port) await device.async_add_port_mappings(ports, local_ip) # sensors if data.get(CONF_ENABLE_SENSORS): _LOGGER.debug('Enabling sensors') # register sensor setup handlers hass.async_create_task(hass.config_entries.async_forward_entry_setup( config_entry, 'sensor')) async def delete_port_mapping(event): """Delete port mapping on quit.""" if data.get(CONF_ENABLE_PORT_MAPPING): _LOGGER.debug('Deleting port mappings') await device.async_delete_port_mappings() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, delete_port_mapping) return True
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Set up the Toon components."""
    if DOMAIN not in config:
        return True

    conf = config[DOMAIN]

    # Store config to be used during entry setup
    hass.data[DATA_TOON_CONFIG] = conf

    return True
async def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Set up the HEOS component."""
    if DOMAIN not in config:
        return True
    host = config[DOMAIN][CONF_HOST]
    entries = hass.config_entries.async_entries(DOMAIN)
    if not entries:
        # Create new entry based on config
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, context={'source': 'import'},
                data={CONF_HOST: host}))
    else:
        # Check if host needs to be updated
        entry = entries[0]
        if entry.data[CONF_HOST] != host:
            entry.data[CONF_HOST] = host
            entry.title = format_title(host)
            hass.config_entries.async_update_entry(entry)
    return True
def async_setup_scanner_platform(hass: HomeAssistantType, config: ConfigType, scanner: Any, async_see_device: Callable): """Helper method to connect scanner-based platform to device tracker. This method is a coroutine. """ interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) scanner.hass = hass # Initial scan of each mac we also tell about host name for config seen = set() # type: Any @asyncio.coroutine def async_device_tracker_scan(now: dt_util.dt.datetime): """Called when interval matches.""" found_devices = yield from scanner.async_scan_devices() for mac in found_devices: if mac in seen: host_name = None else: host_name = yield from scanner.async_get_device_name(mac) seen.add(mac) kwargs = { 'mac': mac, 'host_name': host_name, 'source_type': SOURCE_TYPE_ROUTER } zone_home = hass.states.get(zone.ENTITY_ID_HOME) if zone_home: kwargs['gps'] = [zone_home.attributes[ATTR_LATITUDE], zone_home.attributes[ATTR_LONGITUDE]] kwargs['gps_accuracy'] = 0 hass.async_add_job(async_see_device(**kwargs)) async_track_time_interval(hass, async_device_tracker_scan, interval) hass.async_add_job(async_device_tracker_scan, None)
async def async_get_registry(hass: HomeAssistantType) -> ZhaDeviceStorage:
    """Return zha device storage instance."""
    task = hass.data.get(DATA_REGISTRY)

    if task is None:

        async def _load_reg() -> ZhaDeviceStorage:
            registry = ZhaDeviceStorage(hass)
            await registry.async_load()
            return registry

        task = hass.data[DATA_REGISTRY] = hass.async_create_task(_load_reg())

    return cast(ZhaDeviceStorage, await task)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Set up the MySensors component.""" hass.data[DOMAIN] = {DATA_HASS_CONFIG: config} if DOMAIN not in config or bool(hass.config_entries.async_entries(DOMAIN)): return True config = config[DOMAIN] user_inputs = [ { CONF_DEVICE: gw[CONF_DEVICE], CONF_BAUD_RATE: gw[CONF_BAUD_RATE], CONF_TCP_PORT: gw[CONF_TCP_PORT], CONF_TOPIC_OUT_PREFIX: gw.get(CONF_TOPIC_OUT_PREFIX, ""), CONF_TOPIC_IN_PREFIX: gw.get(CONF_TOPIC_IN_PREFIX, ""), CONF_RETAIN: config[CONF_RETAIN], CONF_VERSION: config[CONF_VERSION], CONF_PERSISTENCE_FILE: gw.get(CONF_PERSISTENCE_FILE) # nodes config ignored at this time. renaming nodes can now be done from the frontend. } for gw in config[CONF_GATEWAYS] ] user_inputs = [ {k: v for k, v in userinput.items() if v is not None} for userinput in user_inputs ] # there is an actual configuration in configuration.yaml, so we have to process it for user_input in user_inputs: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input, ) ) return True
async def async_setup_entry(hass: HomeAssistantType,
                            entry: config_entries.ConfigEntry):
    """Set up an access point from a config entry."""
    client = Client(entry.data[CONF_HOST], entry.data[CONF_PORT])

    config = hass.data[DOMAIN_DATA_CONFIG].get(
        (entry.data[CONF_HOST], entry.data[CONF_PORT]),
        DEVICE_SCHEMA(
            {CONF_HOST: entry.data[CONF_HOST], CONF_PORT: entry.data[CONF_PORT]}
        ),
    )

    hass.data[DOMAIN_DATA_ENTRIES][entry.entry_id] = {
        "client": client,
        "config": config,
    }

    asyncio.ensure_future(_run_client(hass, client, config[CONF_SCAN_INTERVAL]))

    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(entry, "media_player")
    )

    return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Set up Ketra platforms from config entry."""
    installation_id = entry.data.get("installation_id")
    oauth_token = entry.data.get(CONF_ACCESS_TOKEN)
    hub = await N4Hub.get_hub(installation_id, oauth_token, loop=hass.loop)
    _LOGGER.info(
        "Discovered N4 Hub at endpoint '%s' for installation '%s'",
        hub.url_base,
        installation_id,
    )

    hass.data.setdefault(DOMAIN, {})
    hass.data[DOMAIN][entry.unique_id] = {
        "common_platform": KetraPlatformCommon(hass, hub, _LOGGER)
    }

    for platform in KETRA_PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, platform))

    return True
async def async_setup_platform(
    hass: HomeAssistantType,
    config: ConfigType,
    async_add_entities: Callable[[List[Entity], bool], None],
    discovery_info: Any = None,
) -> None:
    """Import the platform into a config entry."""
    if len(hass.config_entries.async_entries(DOMAIN)) > 0:
        # Already imported; nothing left to do.
        return

    config[CONF_BASE_PATH] = f"{config[CONF_URLBASE]}{DEFAULT_BASE_PATH}"
    config[CONF_UPCOMING_DAYS] = int(config[CONF_DAYS])
    config[CONF_VERIFY_SSL] = False
    del config[CONF_DAYS]
    del config[CONF_INCLUDED]
    del config[CONF_MONITORED_CONDITIONS]
    del config[CONF_URLBASE]

    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_IMPORT}, data=config))
async def async_setup(hass: HomeAssistantType, config: ConfigType): """Set up the Withings component.""" conf = config.get(DOMAIN) if not conf: return True hass.data[DOMAIN] = {const.CONFIG: conf} base_url = conf.get(const.BASE_URL, hass.config.api.base_url).rstrip('/') # We don't pull default values from conf because the config # schema would have validated it for us. for profile in conf.get(const.PROFILES): config_flow.register_flow_implementation(hass, conf.get(const.CLIENT_ID), conf.get(const.CLIENT_SECRET), base_url, profile) hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={'source': const.SOURCE_USER}, data={})) return True
async def async_setup_entry(hass: HomeAssistantType,
                            entry: ConfigEntry) -> bool:
    """Set up Lyric from a config entry."""
    conf = hass.data[DATA_LYRIC_CONFIG]

    client_id = conf[CONF_CLIENT_ID]
    client_secret = conf[CONF_CLIENT_SECRET]
    token = entry.data[CONF_TOKEN]
    token_cache_file = hass.config.path(CONF_LYRIC_CONFIG_FILE)

    lyric = Lyric(app_name='Home Assistant', client_id=client_id,
                  client_secret=client_secret, token=token,
                  token_cache_file=token_cache_file)

    hass.data.setdefault(DOMAIN, {})[DATA_LYRIC_CLIENT] = LyricClient(lyric)

    for component in 'climate', 'sensor':
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component))

    return True
async def async_unload_entry(hass: HomeAssistantType,
                             entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    unload = all(await asyncio.gather(*[
        hass.config_entries.async_forward_entry_unload(entry, platform)
        for platform in PLATFORMS
    ]))
    await asyncio.gather(*[
        hass.async_add_executor_job(gateway.websocket_disconnect)
        for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]
    ])
    hass.data[DOMAIN][entry.entry_id]["listener"]()
    hass.data[DOMAIN].pop(entry.entry_id)
    return unload
async def async_get_registry(hass: HomeAssistantType) -> ZhaStorage:
    """Return zha device storage instance."""
    task = hass.data.get(DATA_REGISTRY)

    if task is None:

        async def _load_reg() -> ZhaStorage:
            registry = ZhaStorage(hass)
            await registry.async_load()
            return registry

        task = hass.data[DATA_REGISTRY] = hass.async_create_task(_load_reg())

    return cast(ZhaStorage, await task)
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry): """Establish connection with Daikin.""" conf = entry.data # For backwards compat, set unique ID if entry.unique_id is None: hass.config_entries.async_update_entry(entry, unique_id=conf[KEY_MAC]) elif ".local" in entry.unique_id: hass.config_entries.async_update_entry(entry, unique_id=conf[KEY_MAC]) daikin_api = await daikin_api_setup( hass, conf[CONF_HOST], conf.get(CONF_API_KEY), conf.get(CONF_UUID), conf.get(CONF_PASSWORD), ) if not daikin_api: return False hass.data.setdefault(DOMAIN, {}).update({entry.entry_id: daikin_api}) for component in COMPONENT_TYPES: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Create entry for Bosch thermostat device."""
    _LOGGER.info("Setting up Bosch component version %s.", LIBVERSION)
    uuid = entry.data[UUID]
    gateway_entry = BoschGatewayEntry(
        hass=hass,
        uuid=uuid,
        host=entry.data[CONF_ADDRESS],
        protocol=entry.data[CONF_PROTOCOL],
        device_type=entry.data[CONF_DEVICE_TYPE],
        access_key=entry.data[ACCESS_KEY],
        access_token=entry.data[ACCESS_TOKEN],
        entry=entry)
    hass.data[DOMAIN][uuid] = {BOSCH_GATEWAY_ENTRY: gateway_entry}
    return await gateway_entry.async_init()
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Set up the HomematicIP Cloud component."""
    hass.data[DOMAIN] = {}

    accesspoints = config.get(DOMAIN, [])

    for conf in accesspoints:
        if conf[CONF_ACCESSPOINT] not in {
                entry.data[HMIPC_HAPID]
                for entry in hass.config_entries.async_entries(DOMAIN)
        }:
            hass.async_add_job(
                hass.config_entries.flow.async_init(
                    DOMAIN,
                    context={"source": config_entries.SOURCE_IMPORT},
                    data={
                        HMIPC_HAPID: conf[CONF_ACCESSPOINT],
                        HMIPC_AUTHTOKEN: conf[CONF_AUTHTOKEN],
                        HMIPC_NAME: conf[CONF_NAME],
                    },
                ))

    return True
def _setup_internal_discovery(hass: HomeAssistantType) -> None: """Set up the pychromecast internal discovery.""" if INTERNAL_DISCOVERY_RUNNING_KEY not in hass.data: hass.data[INTERNAL_DISCOVERY_RUNNING_KEY] = threading.Lock() if not hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].acquire(blocking=False): # Internal discovery is already running return import pychromecast def internal_add_callback(name): """Handle zeroconf discovery of a new chromecast.""" mdns = listener.services[name] _discover_chromecast( hass, ChromecastInfo( service=name, host=mdns[0], port=mdns[1], uuid=mdns[2], model_name=mdns[3], friendly_name=mdns[4], )) def internal_remove_callback(name, mdns): """Handle zeroconf discovery of a removed chromecast.""" _remove_chromecast( hass, ChromecastInfo( service=name, host=mdns[0], port=mdns[1], uuid=mdns[2], model_name=mdns[3], friendly_name=mdns[4], )) _LOGGER.debug("Starting internal pychromecast discovery.") listener, browser = pychromecast.start_discovery(internal_add_callback, internal_remove_callback) ChromeCastZeroconf.set_zeroconf(browser.zc) def stop_discovery(event): """Stop discovery of new chromecasts.""" _LOGGER.debug("Stopping internal pychromecast discovery.") pychromecast.stop_discovery(browser) hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].release() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_discovery)
async def async_setup_entry(hass: HomeAssistantType,
                            config_entry: ConfigEntry) -> bool:
    """Set up Minecraft Server from a config entry."""
    domain_data = hass.data.setdefault(DOMAIN, {})

    # Create and store server instance.
    unique_id = config_entry.unique_id
    _LOGGER.debug(
        "Creating server instance for '%s' (%s)",
        config_entry.data[CONF_NAME],
        config_entry.data[CONF_HOST],
    )
    server = MinecraftServer(hass, unique_id, config_entry.data)
    domain_data[unique_id] = server
    await server.async_update()
    server.start_periodic_update()

    # Set up platforms.
    for platform in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(config_entry, platform)
        )

    return True
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Set up the Redmond Kettler component from configuration.yaml."""
    hass.data[DOMAIN] = {}
    kwargs = dict(config[DOMAIN])
    dev = kwargs.get(CONF_DEVICE)
    mac = kwargs.get(CONF_MAC)
    key = kwargs.get(CONF_PASSWORD)
    scan_delta = kwargs.get(CONF_SCAN_INTERVAL)

    if not key or len(key) != 16:
        _LOGGER.error("Key value is empty or wrong")
        return False

    mac_regex = '^' + r'[:\-]'.join(['([0-9a-f]{2})'] * 6) + '$'
    if not mac or not re.match(mac_regex, mac.lower()):
        _LOGGER.error("MAC value is empty or wrong")
        return False

    kettler = hass.data[DOMAIN]["kettler"] = RedmondKettler(
        hass, mac, key, dev)
    try:
        await kettler.firstConnect()
    except Exception:  # pylint: disable=broad-except
        _LOGGER.warning(
            "Connect to Kettler %s through device %s failed", mac, dev)
        return False

    async_track_time_interval(hass, kettler.async_update, scan_delta)

    for platform in SUPPORTED_DOMAINS:
        hass.async_create_task(
            async_load_platform(hass, platform, DOMAIN, {}, config))

    return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Establish connection with NeoHub."""
    conf = entry.data
    hub = await neohub_api_setup(hass, conf[CONF_HOST])
    hass.data.setdefault(DOMAIN, {}).update({entry.entry_id: hub})

    # Determining the MAC address is only possible on the local subnet,
    # not if running in a non-bridged docker container, for example.
    mac = get_mac_from_host(conf[CONF_HOST])
    _LOGGER.info("neohub host: %s mac: %s", conf[CONF_HOST], mac)
    if mac is None:
        conns = {('hub_ip', conf[CONF_HOST])}
        idents = {(DOMAIN, conf[CONF_HOST])}
    else:
        conns = {(CONNECTION_NETWORK_MAC, mac)}
        idents = {(DOMAIN, mac)}

    _LOGGER.info("Adding neohub to device registry, entry_id: %s",
                 entry.entry_id)
    # Add to device registry
    dev_reg = await dr.async_get_registry(hass)
    dev_reg.async_get_or_create(
        config_entry_id=entry.entry_id,
        connections=conns,
        identifiers=idents,
        manufacturer='Heatmiser',
        name='NeoHub',
        model=hub.dcb.get('DEVICE_ID'),
        sw_version=hub.dcb.get('Firmware version'),
    )

    # Set up devices for: climate, switch, sensor
    for component in COMPONENT_TYPES:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component))

    return True
async def async_setup(hass: HomeAssistantType, config: ConfigType): """Set up the mobile app component.""" store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) app_config = await store.async_load() if app_config is None: app_config = { DATA_BINARY_SENSOR: {}, DATA_CONFIG_ENTRIES: {}, DATA_DELETED_IDS: [], DATA_SENSOR: {}, } hass.data[DOMAIN] = { DATA_BINARY_SENSOR: app_config.get(DATA_BINARY_SENSOR, {}), DATA_CONFIG_ENTRIES: {}, DATA_DELETED_IDS: app_config.get(DATA_DELETED_IDS, []), DATA_DEVICES: {}, DATA_SENSOR: app_config.get(DATA_SENSOR, {}), DATA_STORE: store, } hass.http.register_view(RegistrationsView()) register_websocket_handlers(hass) for deleted_id in hass.data[DOMAIN][DATA_DELETED_IDS]: try: webhook_register( hass, DOMAIN, "Deleted Webhook", deleted_id, handle_webhook ) except ValueError: pass hass.async_create_task( discovery.async_load_platform(hass, "notify", DOMAIN, {}, config) ) return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: """Set up Canary from a config entry.""" if not entry.options: options = { CONF_FFMPEG_ARGUMENTS: entry.data.get(CONF_FFMPEG_ARGUMENTS, DEFAULT_FFMPEG_ARGUMENTS), CONF_TIMEOUT: entry.data.get(CONF_TIMEOUT, DEFAULT_TIMEOUT), } hass.config_entries.async_update_entry(entry, options=options) try: canary_api = await hass.async_add_executor_job( _get_canary_api_instance, entry) except (ConnectTimeout, HTTPError) as error: _LOGGER.error("Unable to connect to Canary service: %s", str(error)) raise ConfigEntryNotReady from error coordinator = CanaryDataUpdateCoordinator(hass, api=canary_api) await coordinator.async_refresh() if not coordinator.last_update_success: raise ConfigEntryNotReady undo_listener = entry.add_update_listener(_async_update_listener) hass.data[DOMAIN][entry.entry_id] = { DATA_COORDINATOR: coordinator, DATA_UNDO_UPDATE_LISTENER: undo_listener, } for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform)) return True
async def setup_smartapp_endpoint(hass: HomeAssistantType): """ Configure the SmartApp webhook in hass. SmartApps are an extension point within the SmartThings ecosystem and is used to receive push updates (i.e. device updates) from the cloud. """ from pysmartapp import Dispatcher, SmartAppManager data = hass.data.get(DOMAIN) if data: # already setup return # Get/create config to store a unique id for this hass instance. store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) config = await store.async_load() if not config: # Create config config = { CONF_INSTANCE_ID: str(uuid4()), CONF_WEBHOOK_ID: webhook.generate_secret() } await store.async_save(config) # SmartAppManager uses a dispatcher to invoke callbacks when push events # occur. Use hass' implementation instead of the built-in one. dispatcher = Dispatcher(signal_prefix=SIGNAL_SMARTAPP_PREFIX, connect=functools.partial(async_dispatcher_connect, hass), send=functools.partial(async_dispatcher_send, hass)) manager = SmartAppManager(webhook.async_generate_path( config[CONF_WEBHOOK_ID]), dispatcher=dispatcher) manager.connect_install(functools.partial(smartapp_install, hass)) manager.connect_update(functools.partial(smartapp_update, hass)) manager.connect_uninstall(functools.partial(smartapp_uninstall, hass)) webhook.async_register(hass, DOMAIN, 'SmartApp', config[CONF_WEBHOOK_ID], smartapp_webhook) hass.data[DOMAIN] = { DATA_MANAGER: manager, CONF_INSTANCE_ID: config[CONF_INSTANCE_ID], DATA_BROKERS: {}, CONF_WEBHOOK_ID: config[CONF_WEBHOOK_ID], CONF_INSTALLED_APPS: [] }
async def async_setup(hass: HomeAssistantType, hass_config: ConfigType) -> bool:
    """Set up the evohome_cc component from configuration.yaml."""

    async def load_system_config(store) -> Optional[Dict]:
        app_storage = await store.async_load()
        return dict(app_storage if app_storage else {})

    if __version__ == evohome_rf.__version__:
        _LOGGER.warning(
            "evohome_cc v%s, using evohome_rf v%s - versions match (this is good)",
            __version__,
            evohome_rf.__version__,
        )
    else:
        _LOGGER.error(
            "evohome_cc v%s, using evohome_rf v%s - versions don't match (this is bad)",
            __version__,
            evohome_rf.__version__,
        )

    store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
    evohome_store = await load_system_config(store)

    _LOGGER.debug("Store = %s, Config = %s", evohome_store, hass_config[DOMAIN])

    kwargs = dict(hass_config[DOMAIN])
    serial_port = kwargs.pop("serial_port")
    kwargs["blocklist"] = dict.fromkeys(kwargs.pop("ignore_list", []), {})

    try:  # TODO: test invalid serial_port="AA"
        client = evohome_rf.Gateway(serial_port, loop=hass.loop, **kwargs)
    except serial.SerialException as exc:
        _LOGGER.exception("Unable to open serial port. Message is: %s", exc)
        return False

    hass.data[DOMAIN] = {}
    hass.data[DOMAIN]["broker"] = broker = EvoBroker(
        hass, client, store, hass_config[DOMAIN])

    broker.hass_config = hass_config

    # broker.loop_task = hass.async_create_task(client.start())
    broker.loop_task = hass.loop.create_task(client.start())

    hass.helpers.event.async_track_time_interval(
        broker.update, hass_config[DOMAIN][CONF_SCAN_INTERVAL])

    return True
async def async_setup_platform(
        hass: HomeAssistantType,
        config: ConfigType,
        async_add_entities: Callable[[List[Any], bool], None],
        discovery_info: DiscoveryInfoType = None,
):
    """Set up event sensors from configuration.yaml as a sensor platform.

    Left just to read deprecated manual configuration.
    """
    if config:
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, data=config, context={"source": SOURCE_IMPORT}))
        _LOGGER.warning(
            "Manual yaml config is deprecated. "
            "You can remove it now, as it has been migrated to config entry, "
            "handled in the Integrations menu [Sensor %s, event: %s]",
            config.get(CONF_NAME),
            config.get(CONF_EVENT),
        )
    return True
async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigType): """Set up TPLink from a config entry.""" config_data = hass.data[DOMAIN].get(ATTR_CONFIG) # These will contain the initialized devices lights = hass.data[DOMAIN][CONF_LIGHT] = [] switches = hass.data[DOMAIN][CONF_SWITCH] = [] # Add static devices static_devices = SmartDevices() if config_data is not None: static_devices = get_static_devices(config_data) lights.extend(static_devices.lights) switches.extend(static_devices.switches) # Add discovered devices if config_data is None or config_data[CONF_DISCOVERY]: discovered_devices = await async_discover_devices(hass, static_devices) lights.extend(discovered_devices.lights) switches.extend(discovered_devices.switches) forward_setup = hass.config_entries.async_forward_entry_setup if lights: _LOGGER.debug( "Got %s lights: %s", len(lights), ", ".join([d.host for d in lights]) ) hass.async_create_task(forward_setup(config_entry, "light")) if switches: _LOGGER.debug( "Got %s switches: %s", len(switches), ", ".join([d.host for d in switches]) ) hass.async_create_task(forward_setup(config_entry, "switch")) return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: """Set up UltraSync from a config entry.""" if not entry.options: options = { CONF_SCAN_INTERVAL: entry.data.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL), } hass.config_entries.async_update_entry(entry, options=options) coordinator = UltraSyncDataUpdateCoordinator( hass, config=entry.data, options=entry.options, ) await coordinator.async_refresh() if not coordinator.last_update_success: raise ConfigEntryNotReady undo_listener = entry.add_update_listener(_async_update_listener) hass.data[DOMAIN][entry.entry_id] = { DATA_COORDINATOR: coordinator, DATA_UNDO_UPDATE_LISTENER: [undo_listener], SENSORS: {}, } for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component)) _async_register_services(hass, coordinator) return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: """Set up config entry.""" if CONF_TOKEN not in entry.data: # Init reauth flow hass.async_create_task( hass.config_entries.flow.async_init( NEATO_DOMAIN, context={CONF_SOURCE: SOURCE_REAUTH}, )) return False implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( hass, entry)) session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) neato_session = api.ConfigEntryAuth(hass, entry, session) hass.data[NEATO_DOMAIN][entry.entry_id] = neato_session hub = NeatoHub(hass, Account(neato_session)) try: await hass.async_add_executor_job(hub.update_robots) except NeatoException as ex: _LOGGER.debug("Failed to connect to Neato API") raise ConfigEntryNotReady from ex hass.data[NEATO_LOGIN] = hub for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component)) return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Set up config entry for SmartHab integration."""
    # Assign configuration variables
    username = entry.data[CONF_EMAIL]
    password = entry.data[CONF_PASSWORD]

    # Setup connection with SmartHab API
    hub = pysmarthab.SmartHab()

    try:
        await hub.async_login(username, password)
    except pysmarthab.RequestFailedException as err:
        _LOGGER.exception("Error while trying to reach SmartHab API")
        raise ConfigEntryNotReady from err

    # Pass hub object to child platforms
    hass.data[DOMAIN][entry.entry_id] = {DATA_HUB: hub}

    for component in COMPONENTS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component))

    return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Set up Withings from a config entry."""
    data_manager = get_data_manager(hass, entry)

    _LOGGER.debug("Confirming we're authenticated")
    try:
        await data_manager.check_authenticated()
    except NotAuthenticatedError:
        # Trigger new config flow.
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                const.DOMAIN,
                context={
                    "source": SOURCE_USER,
                    const.PROFILE: data_manager.profile
                },
                data={},
            ))
        return False

    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(entry, "sensor"))

    return True
async def async_setup(hass: HomeAssistantType, config: dict) -> bool:
    conf = config.get(DOMAIN) or {
        CONF_POWER_SENSOR_NAMING: DEFAULT_POWER_NAME_PATTERN,
        CONF_ENERGY_SENSOR_NAMING: DEFAULT_ENERGY_NAME_PATTERN,
        CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
        CONF_CREATE_ENERGY_SENSORS: True,
        CONF_CREATE_UTILITY_METERS: False,
    }

    hass.data[DOMAIN] = {
        DATA_CALCULATOR_FACTORY: PowerCalculatorStrategyFactory(hass),
        DOMAIN_CONFIG: conf,
    }

    return True
async def async_setup_entry(hass: HomeAssistantType,
                            entry: ConfigEntry) -> bool:
    """Set up Twente Milieu from a config entry."""
    session = async_get_clientsession(hass)
    twentemilieu = TwenteMilieu(
        post_code=entry.data[CONF_POST_CODE],
        house_number=entry.data[CONF_HOUSE_NUMBER],
        house_letter=entry.data[CONF_HOUSE_LETTER],
        session=session,
    )

    unique_id = entry.data[CONF_ID]
    hass.data.setdefault(DOMAIN, {})[unique_id] = twentemilieu

    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(entry, "sensor"))

    async def _interval_update(now=None) -> None:
        """Update Twente Milieu data."""
        await _update_twentemilieu(hass, unique_id)

    async_track_time_interval(hass, _interval_update, SCAN_INTERVAL)

    return True
async def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Set up the HEOS component."""
    if DOMAIN not in config:
        return True
    host = config[DOMAIN][CONF_HOST]
    entries = hass.config_entries.async_entries(DOMAIN)
    if not entries:
        # Create new entry based on config
        hass.async_create_task(
            hass.config_entries.flow.async_init(DOMAIN,
                                                context={"source": "import"},
                                                data={CONF_HOST: host}))
    else:
        # Check if host needs to be updated
        entry = entries[0]
        if entry.data[CONF_HOST] != host:
            hass.config_entries.async_update_entry(entry,
                                                   title=format_title(host),
                                                   data={
                                                       **entry.data,
                                                       CONF_HOST: host
                                                   })

    return True
async def on_unload(
    hass: HomeAssistantType, entry: ConfigEntry | GatewayId, fnct: Callable
) -> None:
    """Register a callback to be called when entry is unloaded.

    This function is used by platforms to cleanup after themselves.
    """
    if isinstance(entry, GatewayId):
        uniqueid = entry
    else:
        uniqueid = entry.entry_id
    key = MYSENSORS_ON_UNLOAD.format(uniqueid)
    if key not in hass.data[DOMAIN]:
        hass.data[DOMAIN][key] = []
    hass.data[DOMAIN][key].append(fnct)
def async_get_clientsession(hass: HomeAssistantType,
                            verify_ssl: bool = True) -> aiohttp.ClientSession:
    """Return default aiohttp ClientSession.

    This method must be run in the event loop.
    """
    if verify_ssl:
        key = DATA_CLIENTSESSION
    else:
        key = DATA_CLIENTSESSION_NOTVERIFY

    if key not in hass.data:
        hass.data[key] = async_create_clientsession(hass, verify_ssl)

    return cast(aiohttp.ClientSession, hass.data[key])
async def async_setup_entry(hass: HomeAssistantType, entry, async_see):
    """Configure a dispatcher connection based on a config entry."""
    async def _set_location(device, gps_location, battery, accuracy, attrs):
        """Fire HA event to set location."""
        await async_see(
            dev_id=device,
            gps=gps_location,
            battery=battery,
            gps_accuracy=accuracy,
            attributes=attrs
        )

    hass.data[DATA_KEY] = async_dispatcher_connect(
        hass, TRACKER_UPDATE, _set_location
    )
    return True
def _setup_internal_discovery(hass: HomeAssistantType) -> None: """Set up the pychromecast internal discovery.""" if INTERNAL_DISCOVERY_RUNNING_KEY not in hass.data: hass.data[INTERNAL_DISCOVERY_RUNNING_KEY] = threading.Lock() if not hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].acquire(blocking=False): # Internal discovery is already running return import pychromecast def internal_add_callback(name): """Handle zeroconf discovery of a new chromecast.""" mdns = listener.services[name] _discover_chromecast(hass, ChromecastInfo( service=name, host=mdns[0], port=mdns[1], uuid=mdns[2], model_name=mdns[3], friendly_name=mdns[4], )) def internal_remove_callback(name, mdns): """Handle zeroconf discovery of a removed chromecast.""" _remove_chromecast(hass, ChromecastInfo( service=name, host=mdns[0], port=mdns[1], uuid=mdns[2], model_name=mdns[3], friendly_name=mdns[4], )) _LOGGER.debug("Starting internal pychromecast discovery.") listener, browser = pychromecast.start_discovery(internal_add_callback, internal_remove_callback) ChromeCastZeroconf.set_zeroconf(browser.zc) def stop_discovery(event): """Stop discovery of new chromecasts.""" _LOGGER.debug("Stopping internal pychromecast discovery.") pychromecast.stop_discovery(browser) hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].release() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_discovery)
async def setup_smartapp_endpoint(hass: HomeAssistantType): """ Configure the SmartApp webhook in hass. SmartApps are an extension point within the SmartThings ecosystem and is used to receive push updates (i.e. device updates) from the cloud. """ from pysmartapp import Dispatcher, SmartAppManager data = hass.data.get(DOMAIN) if data: # already setup return # Get/create config to store a unique id for this hass instance. store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) config = await store.async_load() if not config: # Create config config = { CONF_INSTANCE_ID: str(uuid4()), CONF_WEBHOOK_ID: webhook.generate_secret() } await store.async_save(config) # SmartAppManager uses a dispatcher to invoke callbacks when push events # occur. Use hass' implementation instead of the built-in one. dispatcher = Dispatcher( signal_prefix=SIGNAL_SMARTAPP_PREFIX, connect=functools.partial(async_dispatcher_connect, hass), send=functools.partial(async_dispatcher_send, hass)) manager = SmartAppManager( webhook.async_generate_path(config[CONF_WEBHOOK_ID]), dispatcher=dispatcher) manager.connect_install(functools.partial(smartapp_install, hass)) manager.connect_uninstall(functools.partial(smartapp_uninstall, hass)) webhook.async_register(hass, DOMAIN, 'SmartApp', config[CONF_WEBHOOK_ID], smartapp_webhook) hass.data[DOMAIN] = { DATA_MANAGER: manager, CONF_INSTANCE_ID: config[CONF_INSTANCE_ID], DATA_BROKERS: {}, CONF_WEBHOOK_ID: config[CONF_WEBHOOK_ID] }
def async_load_config(path: str, hass: HomeAssistantType, consider_home: timedelta): """Load devices from YAML configuration file. This method is a coroutine. """ dev_schema = vol.Schema({ vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_ICON, default=None): vol.Any(None, cv.icon), vol.Optional('track', default=False): cv.boolean, vol.Optional(CONF_MAC, default=None): vol.Any(None, vol.All(cv.string, vol.Upper)), vol.Optional(CONF_AWAY_HIDE, default=DEFAULT_AWAY_HIDE): cv.boolean, vol.Optional('gravatar', default=None): vol.Any(None, cv.string), vol.Optional('picture', default=None): vol.Any(None, cv.string), vol.Optional(CONF_CONSIDER_HOME, default=consider_home): vol.All( cv.time_period, cv.positive_timedelta), }) try: result = [] try: devices = yield from hass.async_add_job( load_yaml_config_file, path) except HomeAssistantError as err: _LOGGER.error("Unable to load %s: %s", path, str(err)) return [] for dev_id, device in devices.items(): # Deprecated option. We just ignore it to avoid breaking change device.pop('vendor', None) try: device = dev_schema(device) device['dev_id'] = cv.slugify(dev_id) except vol.Invalid as exp: async_log_exception(exp, dev_id, devices, hass) else: result.append(Device(hass, **device)) return result except (HomeAssistantError, FileNotFoundError): # When YAML file could not be loaded/did not contain a dict return []
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Start the MQTT protocol service."""
    conf = config.get(DOMAIN)  # type: Optional[ConfigType]

    if conf is None:
        conf = CONFIG_SCHEMA({DOMAIN: {}})[DOMAIN]

    conf = cast(ConfigType, conf)

    client_id = conf.get(CONF_CLIENT_ID)  # type: Optional[str]
    keepalive = conf.get(CONF_KEEPALIVE)  # type: int

    # Only setup if embedded config passed in or no broker specified
    if CONF_EMBEDDED not in conf and CONF_BROKER in conf:
        broker_config = None
    else:
        broker_config = await _async_setup_server(hass, config)

    if CONF_BROKER in conf:
        broker = conf[CONF_BROKER]  # type: str
        port = conf[CONF_PORT]  # type: int
        username = conf.get(CONF_USERNAME)  # type: Optional[str]
        password = conf.get(CONF_PASSWORD)  # type: Optional[str]
        certificate = conf.get(CONF_CERTIFICATE)  # type: Optional[str]
        client_key = conf.get(CONF_CLIENT_KEY)  # type: Optional[str]
        client_cert = conf.get(CONF_CLIENT_CERT)  # type: Optional[str]
        tls_insecure = conf.get(CONF_TLS_INSECURE)  # type: Optional[bool]
        protocol = conf[CONF_PROTOCOL]  # type: str
    elif broker_config is not None:
        # If no broker passed in, auto config to internal server
        broker, port, username, password, certificate, protocol = broker_config
        # Embedded broker doesn't have some ssl variables
        client_key, client_cert, tls_insecure = None, None, None
        # hbmqtt requires a client id to be set.
        if client_id is None:
            client_id = 'home-assistant'
    else:
        err = "Unable to start MQTT broker."
        if conf.get(CONF_EMBEDDED) is not None:
            # Explicit embedded config, requires explicit broker config
            err += " (Broker configuration required.)"
        _LOGGER.error(err)
        return False

    # For cloudmqtt.com, secured connection, auto fill in certificate
    if (certificate is None and 19999 < port < 30000 and
            broker.endswith('.cloudmqtt.com')):
        certificate = os.path.join(os.path.dirname(__file__),
                                   'addtrustexternalcaroot.crt')

    # When the certificate is set to auto, use bundled certs from requests
    if certificate == 'auto':
        certificate = requests.certs.where()

    will_message = None  # type: Optional[Message]
    if conf.get(CONF_WILL_MESSAGE) is not None:
        will_message = Message(**conf.get(CONF_WILL_MESSAGE))

    birth_message = None  # type: Optional[Message]
    if conf.get(CONF_BIRTH_MESSAGE) is not None:
        birth_message = Message(**conf.get(CONF_BIRTH_MESSAGE))

    # Be able to override versions other than TLSv1.0 under Python3.6
    conf_tls_version = conf.get(CONF_TLS_VERSION)  # type: str
    if conf_tls_version == '1.2':
        tls_version = ssl.PROTOCOL_TLSv1_2
    elif conf_tls_version == '1.1':
        tls_version = ssl.PROTOCOL_TLSv1_1
    elif conf_tls_version == '1.0':
        tls_version = ssl.PROTOCOL_TLSv1
    else:
        import sys

        # Python3.6 supports automatic negotiation of highest TLS version
        if sys.hexversion >= 0x03060000:
            tls_version = ssl.PROTOCOL_TLS  # pylint: disable=no-member
        else:
            tls_version = ssl.PROTOCOL_TLSv1

    try:
        hass.data[DATA_MQTT] = MQTT(
            hass, broker, port, client_id, keepalive, username, password,
            certificate, client_key, client_cert, tls_insecure, protocol,
            will_message, birth_message, tls_version)
    except socket.error:
        _LOGGER.exception("Can't connect to the broker. "
                          "Please check your settings and the broker itself")
        return False

    async def async_stop_mqtt(event: Event):
        """Stop MQTT component."""
        await hass.data[DATA_MQTT].async_disconnect()

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_stop_mqtt)

    success = await hass.data[DATA_MQTT].async_connect()  # type: bool

    if not success:
        return False

    async def async_publish_service(call: ServiceCall):
        """Handle MQTT publish service calls."""
        msg_topic = call.data[ATTR_TOPIC]  # type: str
        payload = call.data.get(ATTR_PAYLOAD)
        payload_template = call.data.get(ATTR_PAYLOAD_TEMPLATE)
        qos = call.data[ATTR_QOS]  # type: int
        retain = call.data[ATTR_RETAIN]  # type: bool
        if payload_template is not None:
            try:
                payload = \
                    template.Template(payload_template, hass).async_render()
            except template.jinja2.TemplateError as exc:
                _LOGGER.error(
                    "Unable to publish to %s: rendering payload template of "
                    "%s failed because %s", msg_topic, payload_template, exc)
                return

        await hass.data[DATA_MQTT].async_publish(
            msg_topic, payload, qos, retain)

    hass.services.async_register(
        DOMAIN, SERVICE_PUBLISH, async_publish_service,
        schema=MQTT_PUBLISH_SCHEMA)

    if conf.get(CONF_DISCOVERY):
        await _async_setup_discovery(hass, config)

    return True
def publish(hass: HomeAssistantType, topic, payload, qos=None,
            retain=None) -> None:
    """Publish message to an MQTT topic."""
    hass.add_job(async_publish, hass, topic, payload, qos, retain)
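# A minimal usage sketch, assuming the publish helpers above are exposed as
# homeassistant.components.mqtt and that a `hass` instance is in scope (the
# topic and payload below are illustrative only):
#
#     from homeassistant.components import mqtt
#     mqtt.publish(hass, 'home/kitchen/light/set', 'ON', qos=1, retain=True)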
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry): """Initialize config entry which represents an installed SmartApp.""" from pysmartthings import SmartThings if not hass.config.api.base_url.lower().startswith('https://'): _LOGGER.warning("The 'base_url' of the 'http' component must be " "configured and start with 'https://'") return False api = SmartThings(async_get_clientsession(hass), entry.data[CONF_ACCESS_TOKEN]) remove_entry = False try: # See if the app is already setup. This occurs when there are # installs in multiple SmartThings locations (valid use-case) manager = hass.data[DOMAIN][DATA_MANAGER] smart_app = manager.smartapps.get(entry.data[CONF_APP_ID]) if not smart_app: # Validate and setup the app. app = await api.app(entry.data[CONF_APP_ID]) smart_app = setup_smartapp(hass, app) # Validate and retrieve the installed app. installed_app = await validate_installed_app( api, entry.data[CONF_INSTALLED_APP_ID]) # Get devices and their current status devices = await api.devices( location_ids=[installed_app.location_id]) async def retrieve_device_status(device): try: await device.status.refresh() except ClientResponseError: _LOGGER.debug("Unable to update status for device: %s (%s), " "the device will be ignored", device.label, device.device_id, exc_info=True) devices.remove(device) await asyncio.gather(*[retrieve_device_status(d) for d in devices.copy()]) # Setup device broker broker = DeviceBroker(hass, devices, installed_app.installed_app_id) broker.event_handler_disconnect = \ smart_app.connect_event(broker.event_handler) hass.data[DOMAIN][DATA_BROKERS][entry.entry_id] = broker except ClientResponseError as ex: if ex.status in (401, 403): _LOGGER.exception("Unable to setup config entry '%s' - please " "reconfigure the integration", entry.title) remove_entry = True else: _LOGGER.debug(ex, exc_info=True) raise ConfigEntryNotReady except (ClientConnectionError, RuntimeWarning) as ex: _LOGGER.debug(ex, exc_info=True) raise ConfigEntryNotReady if remove_entry: hass.async_create_task( hass.config_entries.async_remove(entry.entry_id)) # only create new flow if there isn't a pending one for SmartThings. flows = hass.config_entries.flow.async_progress() if not [flow for flow in flows if flow['handler'] == DOMAIN]: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={'source': 'import'})) return False for component in SUPPORTED_PLATFORMS: hass.async_create_task(hass.config_entries.async_forward_entry_setup( entry, component)) return True