async def async_added_to_hass(self):
    """Connect camera methods to signals.

    Registers dispatcher listeners for reconfigure, snapshot and record
    signals; each listener is wrapped so that service-call parameters are
    filtered by entity_id before the camera method is invoked.
    """
    def _dispatch_proxy(method):
        """Expand parameters & filter entity IDs."""
        async def _call(params):
            # A service call may target specific entities; only invoke the
            # wrapped method when there is no entity_id filter or this
            # entity is included in it.
            entity_ids = params.get(ATTR_ENTITY_ID)
            # Strip the entity_id key so only real keyword args remain.
            filtered_params = {k: v for k, v in params.items()
                               if k != ATTR_ENTITY_ID}
            if entity_ids is None or self.entity_id in entity_ids:
                await method(**filtered_params)

        return _call

    # Keep the unsubscribe callbacks so the listeners can be removed later.
    self._listeners.extend([
        async_dispatcher_connect(
            self.hass, SIGNAL_LOGI_CIRCLE_RECONFIGURE,
            _dispatch_proxy(self.set_config)),
        async_dispatcher_connect(
            self.hass, SIGNAL_LOGI_CIRCLE_SNAPSHOT,
            _dispatch_proxy(self.livestream_snapshot)),
        async_dispatcher_connect(
            self.hass, SIGNAL_LOGI_CIRCLE_RECORD,
            _dispatch_proxy(self.download_livestream)),
    ])
async def async_added_to_hass(self):
    """Store entity_id and register state change callback.

    The device address (or, failing that, the sensor type) is used as the
    key under which this entity's id is published for lookup.
    """
    entity_id_key = self._addr or self._type
    self._data[entity_id_key] = self.entity_id
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded,
    # leaking the subscription).
    self.async_on_remove(
        async_dispatcher_connect(
            self.hass, SIGNAL_SENSOR_UPDATE.format(self.entity_id),
            self.async_set_state))
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the deCONZ lights and groups from a config entry."""
    gateway = hass.data[DECONZ_DOMAIN]

    @callback
    def async_add_light(lights):
        """Create entities for newly discovered deCONZ lights."""
        # Covers and switches are handled by their own platforms.
        new_entities = [
            DeconzLight(light, gateway)
            for light in lights
            if light.type not in COVER_TYPES + SWITCH_TYPES
        ]
        async_add_entities(new_entities, True)

    gateway.listeners.append(
        async_dispatcher_connect(hass, 'deconz_new_light', async_add_light))

    @callback
    def async_add_group(groups):
        """Create entities for newly discovered deCONZ groups."""
        allow_group = config_entry.data.get(CONF_ALLOW_DECONZ_GROUPS, True)
        new_entities = [
            DeconzLight(group, gateway)
            for group in groups
            if group.lights and allow_group
        ]
        async_add_entities(new_entities, True)

    gateway.listeners.append(
        async_dispatcher_connect(hass, 'deconz_new_group', async_add_group))

    # Seed the platform with everything the gateway already knows about.
    async_add_light(gateway.api.lights.values())
    async_add_group(gateway.api.groups.values())
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType,
                               async_add_devices, discovery_info=None):
    """Set up the cast platform.

    Depending on configuration this either starts pychromecast discovery
    (no host, or a group on a non-default port) or connects directly to a
    single manually configured Chromecast.
    """
    import pychromecast

    # Import CEC IGNORE attributes
    pychromecast.IGNORE_CEC += config.get(CONF_IGNORE_CEC, [])
    hass.data.setdefault(ADDED_CAST_DEVICES_KEY, {})
    hass.data.setdefault(KNOWN_CHROMECASTS_KEY, {})

    # None -> use discovery; (host, port) -> manually specify chromecast.
    want_host = None
    if discovery_info:
        want_host = (discovery_info.get('host'), discovery_info.get('port'))
    elif CONF_HOST in config:
        want_host = (config.get(CONF_HOST), DEFAULT_PORT)

    enable_discovery = False
    if want_host is None:
        # We were explicitly told to enable pychromecast discovery.
        enable_discovery = True
    elif want_host[1] != DEFAULT_PORT:
        # We're trying to add a group, so we have to use pychromecast's
        # discovery to get the correct friendly name.
        enable_discovery = True

    if enable_discovery:
        @callback
        def async_cast_discovered(chromecast):
            """Callback for when a new chromecast is discovered."""
            if want_host is not None and \
                    (chromecast.host, chromecast.port) != want_host:
                return  # for groups, only add requested device
            cast_device = _async_create_cast_device(hass, chromecast)
            if cast_device is not None:
                async_add_devices([cast_device])

        async_dispatcher_connect(hass, SIGNAL_CAST_DISCOVERED,
                                 async_cast_discovered)
        # Re-play the callback for all past chromecasts, store the objects in
        # a list to avoid concurrent modification resulting in exception.
        for chromecast in list(hass.data[KNOWN_CHROMECASTS_KEY].values()):
            async_cast_discovered(chromecast)

        hass.async_add_job(_setup_internal_discovery, hass)
    else:
        # Manually add a "normal" Chromecast, we can do that without
        # discovery. The constructor does blocking I/O, hence the executor.
        try:
            chromecast = await hass.async_add_job(
                pychromecast.Chromecast, *want_host)
        except pychromecast.ChromecastConnectionError as err:
            _LOGGER.warning("Can't set up chromecast on %s: %s",
                            want_host[0], err)
            raise PlatformNotReady
        key = (chromecast.host, chromecast.port, chromecast.uuid)
        cast_device = _async_create_cast_device(hass, chromecast)
        if cast_device is not None:
            hass.data[KNOWN_CHROMECASTS_KEY][key] = chromecast
            async_add_devices([cast_device])
async def async_added_to_hass(self):
    """Register update callback.

    Publishes this entity's id (and any aliases) in the shared lookup
    table, connects availability/event dispatcher signals, and replays the
    initial event that triggered entity creation.
    """
    # Remove temporary bogus entity_id if added
    tmp_entity = TMP_ENTITY.format(self._device_id)
    if tmp_entity in self.hass.data[DATA_ENTITY_LOOKUP][
            EVENT_KEY_SENSOR][self._device_id]:
        self.hass.data[DATA_ENTITY_LOOKUP][
            EVENT_KEY_SENSOR][self._device_id].remove(tmp_entity)

    # Register id and aliases
    self.hass.data[DATA_ENTITY_LOOKUP][
        EVENT_KEY_SENSOR][self._device_id].append(self.entity_id)
    if self._aliases:
        for _id in self._aliases:
            self.hass.data[DATA_ENTITY_LOOKUP][
                EVENT_KEY_SENSOR][_id].append(self.entity_id)
    # Register the unsubscribe callbacks so the listeners are released
    # when the entity is removed (previously they were discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_AVAILABILITY, self._availability_callback))
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_HANDLE_EVENT.format(self.entity_id),
        self.handle_event_callback))

    # Process the initial event now that the entity is created
    if self._initial_event:
        self.handle_event_callback(self._initial_event)
async def async_setup_entry(hass, config_entry, async_add_devices):
    """Set up the deCONZ lights and groups from a config entry."""
    @callback
    def async_add_light(lights):
        """Create entities for newly discovered deCONZ lights."""
        async_add_devices([DeconzLight(light) for light in lights], True)

    hass.data[DATA_DECONZ_UNSUB].append(
        async_dispatcher_connect(hass, 'deconz_new_light', async_add_light))

    @callback
    def async_add_group(groups):
        """Create entities for newly discovered deCONZ groups."""
        allow_group = config_entry.data.get(CONF_ALLOW_DECONZ_GROUPS, True)
        new_entities = [
            DeconzLight(group)
            for group in groups
            if group.lights and allow_group
        ]
        async_add_devices(new_entities, True)

    hass.data[DATA_DECONZ_UNSUB].append(
        async_dispatcher_connect(hass, 'deconz_new_group', async_add_group))

    # Seed the platform with the lights and groups already known.
    async_add_light(hass.data[DATA_DECONZ].lights.values())
    async_add_group(hass.data[DATA_DECONZ].groups.values())
def test_receiving_non_utf8_message_gets_logged(self):
    """Test receiving a non utf8 encoded message."""
    calls = []

    @callback
    def record(topic, payload, qos):
        """Helper to record calls."""
        data = {
            'topic': topic,
            'payload': payload,
            'qos': qos,
        }
        calls.append(data)

    async_dispatcher_connect(
        self.hass, mqtt.SIGNAL_MQTT_MESSAGE_RECEIVED, record)

    # NOTE(review): payload is an int, not bytes — the component is
    # expected to treat it as an invalid utf-8 payload and log an error.
    payload = 0x9a
    topic = 'test_topic'
    MQTTMessage = namedtuple('MQTTMessage', ['topic', 'qos', 'payload'])
    message = MQTTMessage(topic, 1, payload)
    with self.assertLogs(level='ERROR') as test_handle:
        # Feed the raw message straight into the internal on_message hook.
        self.hass.data['mqtt']._mqtt_on_message(
            None, {'hass': self.hass}, message)
        self.hass.block_till_done()
        self.assertIn(
            "ERROR:homeassistant.components.mqtt:Illegal utf-8 unicode "
            "payload from MQTT topic: %s, Payload: " % topic,
            test_handle.output[0])
async def async_added_to_hass(self):
    """Call when entity is added to hass.

    Connects the per-address update signal and requests the current
    dimmer level from the controller.
    """
    signal = ENTITY_SIGNAL.format(self._addr)
    _LOGGER.debug('connecting %s', signal)
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass, signal, self._update_callback))
    self._controller.request_dimmer_level(self._addr)
def async_added_to_hass(self):
    """Register callbacks for zone fault and restore signals."""
    # Register the unsubscribe callbacks so the dispatcher listeners are
    # released when the entity is removed (previously they were discarded,
    # leaking the subscriptions).
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_ZONE_FAULT, self._fault_callback))
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_ZONE_RESTORE, self._restore_callback))
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up mobile app sensor from a config entry.

    Adds all sensors already registered for this entry's webhook, then
    listens for sensors registered later via the register signal.
    """
    entities = []  # idiom: [] over list()

    webhook_id = config_entry.data[CONF_WEBHOOK_ID]

    for config in hass.data[DOMAIN][ENTITY_TYPE].values():
        # Only pick up sensors belonging to this config entry's webhook.
        if config[CONF_WEBHOOK_ID] != webhook_id:
            continue
        device = hass.data[DOMAIN][DATA_DEVICES][webhook_id]
        entities.append(MobileAppSensor(config, device, config_entry))

    async_add_entities(entities)

    @callback
    def handle_sensor_registration(webhook_id, data):
        """Add a sensor registered after setup, if it matches our webhook."""
        if data[CONF_WEBHOOK_ID] != webhook_id:
            return
        device = hass.data[DOMAIN][DATA_DEVICES][data[CONF_WEBHOOK_ID]]
        async_add_entities([MobileAppSensor(data, device, config_entry)])

    # Bind this entry's webhook id as the first argument of the handler.
    async_dispatcher_connect(
        hass, '{}_{}_register'.format(DOMAIN, ENTITY_TYPE),
        partial(handle_sensor_registration, webhook_id))
async def test_event_handler_dispatches_updated_devices(
        hass, config_entry, device_factory, event_request_factory):
    """Test the event handler dispatches updated devices."""
    devices = [
        device_factory('Bedroom 1 Switch', ['switch']),
        device_factory('Bathroom 1', ['switch']),
        device_factory('Sensor', ['motionSensor']),
    ]
    device_ids = [devices[0].device_id, devices[1].device_id,
                  devices[2].device_id]
    request = event_request_factory(device_ids)
    # Make the broker treat the request as coming from our installed app.
    config_entry.data[CONF_INSTALLED_APP_ID] = request.installed_app_id
    called = False

    def signal(ids):
        """Record that the update signal fired with the expected ids."""
        nonlocal called
        called = True
        assert device_ids == ids

    async_dispatcher_connect(hass, SIGNAL_SMARTTHINGS_UPDATE, signal)
    broker = smartthings.DeviceBroker(
        hass, config_entry, Mock(), Mock(), devices, [])
    broker.connect()

    # pylint:disable=protected-access
    await broker._event_handler(request, None, None)
    await hass.async_block_till_done()

    assert called
    # Every device's status should have been updated from the event data.
    for device in devices:
        assert device.status.values['Updated'] == 'Value'
async def test_event_handler_dispatches_updated_devices(
        hass, device_factory, event_request_factory):
    """Test the event handler dispatches updated devices."""
    devices = [
        device_factory('Bedroom 1 Switch', ['switch']),
        device_factory('Bathroom 1', ['switch']),
        device_factory('Sensor', ['motionSensor']),
    ]
    device_ids = [devices[0].device_id, devices[1].device_id,
                  devices[2].device_id]
    request = event_request_factory(device_ids)
    called = False

    def signal(ids):
        """Record that the update signal fired with the expected ids."""
        nonlocal called
        called = True
        assert device_ids == ids

    async_dispatcher_connect(hass, SIGNAL_SMARTTHINGS_UPDATE, signal)
    broker = smartthings.DeviceBroker(
        hass, devices, request.installed_app_id)

    await broker.event_handler(request, None, None)
    await hass.async_block_till_done()

    assert called
    # Every device's status should have been updated from the event data.
    for device in devices:
        assert device.status.attributes['Updated'] == 'Value'
def test_receiving_mqtt_message_fires_hass_event(self):
    """Test if receiving triggers an event."""
    calls = []

    @callback
    def record(topic, payload, qos):
        """Helper to record calls."""
        data = {
            'topic': topic,
            'payload': payload,
            'qos': qos,
        }
        calls.append(data)

    async_dispatcher_connect(
        self.hass, mqtt.SIGNAL_MQTT_MESSAGE_RECEIVED, record)

    MQTTMessage = namedtuple('MQTTMessage', ['topic', 'qos', 'payload'])
    message = MQTTMessage('test_topic', 1, 'Hello World!'.encode('utf-8'))
    # Feed the raw paho-style message straight into the internal handler.
    self.hass.data['mqtt']._mqtt_on_message(
        None, {'hass': self.hass}, message)
    self.hass.block_till_done()

    self.assertEqual(1, len(calls))
    last_event = calls[0]
    # The payload is delivered as raw bytes, not decoded text.
    self.assertEqual(bytearray('Hello World!', 'utf-8'),
                     last_event['payload'])
    self.assertEqual(message.topic, last_event['topic'])
    self.assertEqual(message.qos, last_event['qos'])
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the deCONZ lights and groups from a config entry."""
    gateway = get_gateway_from_config_entry(hass, config_entry)

    @callback
    def async_add_light(lights):
        """Create entities for newly discovered deCONZ lights."""
        # Covers and switches are handled by their own platforms.
        new_entities = [
            DeconzLight(light, gateway)
            for light in lights
            if light.type not in COVER_TYPES + SWITCH_TYPES
        ]
        async_add_entities(new_entities, True)

    gateway.listeners.append(async_dispatcher_connect(
        hass, gateway.async_event_new_device(NEW_LIGHT), async_add_light))

    @callback
    def async_add_group(groups):
        """Create entities for newly discovered deCONZ groups."""
        new_entities = [
            DeconzLight(group, gateway)
            for group in groups
            if group.lights and gateway.allow_deconz_groups
        ]
        async_add_entities(new_entities, True)

    gateway.listeners.append(async_dispatcher_connect(
        hass, gateway.async_event_new_device(NEW_GROUP), async_add_group))

    # Seed the platform with everything the gateway already knows about.
    async_add_light(gateway.api.lights.values())
    async_add_group(gateway.api.groups.values())
def __init__(self, hass, config, port):
    """Initialize Axis Communications camera component."""
    super().__init__(hass, config)
    self.port = port
    # Re-target the camera whenever the device announces a new IP address.
    new_ip_signal = DOMAIN + '_' + config[CONF_NAME] + '_new_ip'
    async_dispatcher_connect(hass, new_ip_signal, self._new_ip)
async def async_added_to_hass(self):
    """Register update signal handler."""
    async def async_update_state():
        """Fetch fresh data and update the entity state."""
        await self.async_update_ha_state(True)

    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_NEST_UPDATE, async_update_state))
async def async_added_to_hass(self):
    """Register callbacks.

    Subscribes to binary event updates keyed by this tag's id and the
    entity's device class.
    """
    tag_id = self.tag_id
    event_type = self.device_class
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass,
        SIGNAL_BINARY_EVENT_UPDATE.format(tag_id, event_type),
        self._on_binary_event_callback))
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Z-Wave Cover from Config Entry."""
    @callback
    def _async_new_cover(entity):
        """Forward a discovered Z-Wave cover to the entity platform."""
        async_add_entities([entity])

    async_dispatcher_connect(hass, 'zwave_new_cover', _async_new_cover)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Z-Wave Switch from Config Entry."""
    @callback
    def _async_new_switch(entity):
        """Forward a discovered Z-Wave switch to the entity platform."""
        async_add_entities([entity])

    async_dispatcher_connect(hass, 'zwave_new_switch', _async_new_switch)
async def async_added_to_hass(self):
    """Register update callback.

    Hooks the client's per-port status callback, reads the current port
    state, and subscribes to availability updates for this device.
    """
    self._client.register_status_callback(self.handle_event_callback,
                                          self._device_port)
    self._is_on = await self._client.status(self._device_port)
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_AVAILABILITY.format(self._device_id),
        self._availability_callback))
async def async_added_to_hass(self):
    """Call when entity is added to hass.

    Publishes this entity's id for device lookup and subscribes to the
    Tuya delete/update signals.
    """
    dev_id = self.tuya.object_id()
    self.hass.data[DOMAIN]['entities'][dev_id] = self.entity_id
    # Register the unsubscribe callbacks so the dispatcher listeners are
    # released when the entity is removed (previously they were discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_DELETE_ENTITY, self._delete_callback))
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_UPDATE_ENTITY, self._update_callback))
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Z-Wave Climate device from Config Entry."""
    @callback
    def _async_new_climate(entity):
        """Forward a discovered Z-Wave climate device to the platform."""
        async_add_entities([entity])

    async_dispatcher_connect(hass, 'zwave_new_climate', _async_new_climate)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Z-Wave Light from Config Entry."""
    @callback
    def _async_new_light(entity):
        """Forward a discovered Z-Wave light to the entity platform."""
        async_add_entities([entity])

    async_dispatcher_connect(hass, 'zwave_new_light', _async_new_light)
def async_added_to_hass(self):
    """Register dispatcher and callbacks."""
    @callback
    def async_update_image(image):
        """Store the latest image pushed over the dispatcher."""
        self._image = image

    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(
        async_dispatcher_connect(self.hass, self._signal, async_update_image))
async def async_added_to_hass(self):
    """Subscribe to delete/update signals for this external id."""
    remove_delete = async_dispatcher_connect(
        self.hass, SIGNAL_DELETE_ENTITY.format(self._external_id),
        self._delete_callback)
    remove_update = async_dispatcher_connect(
        self.hass, SIGNAL_UPDATE_ENTITY.format(self._external_id),
        self._update_callback)
    # Keep both unsubscribe handles for teardown.
    self._remove_signal_delete = remove_delete
    self._remove_signal_update = remove_update
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Z-Wave Fan from Config Entry."""
    @callback
    def _async_new_fan(entity):
        """Forward a discovered Z-Wave fan to the entity platform."""
        async_add_entities([entity])

    async_dispatcher_connect(hass, 'zwave_new_fan', _async_new_fan)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Z-Wave Sensor from Config Entry."""
    @callback
    def _async_new_sensor(entity):
        """Forward a discovered Z-Wave sensor to the entity platform."""
        async_add_entities([entity])

    async_dispatcher_connect(hass, 'zwave_new_sensor', _async_new_sensor)
def async_added_to_hass(self):
    """Register update dispatcher."""
    @callback
    def async_eight_heat_update():
        """Schedule a state refresh when heat data updates."""
        # Use the Entity helper directly instead of scheduling the
        # async_update_ha_state coroutine via async_add_job; this matches
        # the sibling user-update handler and is the supported way to
        # request a refresh from a callback.
        self.async_schedule_update_ha_state(True)

    async_dispatcher_connect(
        self.hass, SIGNAL_UPDATE_HEAT, async_eight_heat_update)
async def async_added_to_hass(self):
    """Register update dispatcher."""
    @callback
    def _user_data_updated():
        """Schedule a state refresh when user data updates."""
        self.async_schedule_update_ha_state(True)

    async_dispatcher_connect(
        self.hass, SIGNAL_UPDATE_USER, _user_data_updated)
def __init__(self, hass, addr, name):
    """Register callback that will be used for signals."""
    self._hass = hass
    self._addr = addr
    self._name = name
    self._id = slugify(name)
    # Listen for state updates addressed to this device.
    async_dispatcher_connect(
        hass, ENTITY_SIGNAL.format(addr), self._update_callback)
async def async_added_to_hass(self):
    """Register callbacks."""
    # Keep the unsubscribe handle so the listener can be removed later.
    unsubscribe = async_dispatcher_connect(
        self._hass, SIGNALS[CURRENT_DOMAIN], self.update_data)
    self._remove_dispatcher = unsubscribe
async def async_added_to_hass(self):
    """Register update dispatcher."""
    # async_on_remove releases the subscription on entity removal.
    unsubscribe = async_dispatcher_connect(
        self.hass, SIGNAL_STATE_UPDATED, self.async_write_ha_state)
    self.async_on_remove(unsubscribe)
def async_added_to_hass(self):
    """Register update callback."""
    # The per-value signal is keyed by gateway identity, node, child
    # and value type.
    signal_key = (id(self.gateway), self.node_id,
                  self.child_id, self.value_type)
    async_dispatcher_connect(
        self.hass, SIGNAL_CALLBACK.format(*signal_key),
        self._async_update_callback)
async def async_added_to_hass(self):
    """Sync current state and subscribe to kettle update events."""
    self._handle_update()
    unsubscribe = async_dispatcher_connect(
        self._kettler.hass, 'ready4skyupdate', self._handle_update)
    self.async_on_remove(unsubscribe)
async def async_added_to_hass(self) -> None:
    """Connect dispatcher to signal from server."""
    # Keep the unsubscribe handle so the connection can be torn down.
    unsubscribe = async_dispatcher_connect(
        self.hass, self._server.signal_name, self._update_callback)
    self._disconnect_dispatcher = unsubscribe
async def async_start(self):
    """Start watching for device tracker registrations.

    The original docstring said "Stop watching", which contradicted the
    code: this method subscribes to the registration signal. The returned
    unsubscribe callback is stored in self._unsub so the watch can be
    stopped later.
    """
    self._unsub = async_dispatcher_connect(
        self.hass, CONNECTED_DEVICE_REGISTERED,
        self._async_process_device_state)
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up the switch platform.

    Creates a switch per known device across all systems, listens for
    devices that connect later, and registers the prioritize services.
    """
    coordinator = hass.data[DOMAIN][entry.entry_id][COORDINATOR]
    # NOTE(review): this assignment is immediately shadowed by the loop
    # variable below and looks like dead code — confirm before removing.
    device = hass.data[DOMAIN][entry.entry_id]
    entities = []

    data_unit = entry.options.get(CONF_SPEED_UNITS,
                                  DATA_RATE_MEGABITS_PER_SECOND)

    for system_id, system in coordinator.data.items():
        for dev_id, device in system["devices"].items():
            device_name = f"{device['friendlyName']}"
            if device.get("friendlyType"):
                device_name = device_name + f" ({device['friendlyType']})"
            entity = GoogleWifiSwitch(
                coordinator=coordinator,
                name=device_name,
                icon=DEFAULT_ICON,
                system_id=system_id,
                item_id=dev_id,
                data_unit=data_unit,
            )
            entities.append(entity)

    async_add_entities(entities)

    async def async_new_entities(device_info):
        """Add new entities when they connect to Google Wifi."""
        system_id = device_info["system_id"]
        device_id = device_info["device_id"]
        device = device_info["device"]
        device_name = f"{device['friendlyName']}"
        if device.get("friendlyType"):
            device_name = device_name + f" ({device['friendlyType']})"
        entity = GoogleWifiSwitch(
            coordinator=coordinator,
            name=device_name,
            icon=DEFAULT_ICON,
            system_id=system_id,
            item_id=device_id,
            data_unit=data_unit,
        )
        entities = [entity]
        async_add_entities(entities)

    async_dispatcher_connect(hass, SIGNAL_ADD_DEVICE, async_new_entities)

    # register service for reset
    platform = entity_platform.current_platform.get()

    platform.async_register_entity_service(
        SERVICE_PRIORITIZE,
        {vol.Required("duration"): cv.positive_int},
        "async_prioritize_device",
    )
    platform.async_register_entity_service(
        SERVICE_CLEAR_PRIORITIZATION,
        {},
        "async_clear_prioritization",
    )

    return True
async def async_added_to_hass(self) -> None:
    """Set up a listener when this entity is added to HA."""
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(
        async_dispatcher_connect(self.hass, DOMAIN, self._update_callback))
async def async_added_to_hass(self):
    """Connect and subscribe to dispatcher signals and state updates."""
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(
        async_dispatcher_connect(self.hass, DOMAIN,
                                 self.async_signal_handler))
    await self._client.register_state_update_callback(
        self.async_handle_state_update)
async def async_added_to_hass(self):
    """Register callbacks."""
    # Refresh this entity when cached SensorPush data has been updated.
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_SENSORPUSH_UPDATED, self._update_callback))
async def async_added_to_hass(self):
    """Register callbacks for keypad and partition updates."""
    # Register the unsubscribe callbacks so the dispatcher listeners are
    # released when the entity is removed (previously they were discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_KEYPAD_UPDATE, self._update_callback))
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_PARTITION_UPDATE, self._update_callback))
async def async_setup_trigger(hass, tasmota_trigger, config_entry,
                              discovery_hash):
    """Set up a discovered Tasmota device trigger.

    Subscribes to discovery updates for the trigger and either creates a
    new Trigger holder or attaches the Tasmota trigger to an existing one.
    """
    discovery_id = tasmota_trigger.cfg.trigger_id
    remove_update_signal = None
    _LOGGER.debug(
        "Discovered trigger with ID: %s '%s'", discovery_id,
        tasmota_trigger.cfg)

    async def discovery_update(trigger_config):
        """Handle discovery update.

        Note: this closure reads remove_update_signal, which is assigned
        below — by the time an update arrives it holds the dispatcher
        unsubscribe callback.
        """
        _LOGGER.debug(
            "Got update for trigger with hash: %s '%s'", discovery_hash,
            trigger_config)
        if not trigger_config.is_active:
            # Empty trigger_config: Remove trigger
            _LOGGER.debug("Removing trigger: %s", discovery_hash)
            if discovery_id in hass.data[DEVICE_TRIGGERS]:
                device_trigger = hass.data[DEVICE_TRIGGERS][discovery_id]
                await device_trigger.tasmota_trigger.unsubscribe_topics()
                device_trigger.detach_trigger()
                clear_discovery_hash(hass, discovery_hash)
                remove_update_signal()
            return

        device_trigger = hass.data[DEVICE_TRIGGERS][discovery_id]
        if device_trigger.tasmota_trigger.config_same(trigger_config):
            # Unchanged payload: Ignore to avoid unnecessary
            # unsubscribe / subscribe
            _LOGGER.debug("Ignoring unchanged update for: %s",
                          discovery_hash)
            return

        # Non-empty, changed trigger_config: Update trigger
        _LOGGER.debug("Updating trigger: %s", discovery_hash)
        device_trigger.tasmota_trigger.config_update(trigger_config)
        await device_trigger.update_tasmota_trigger(
            trigger_config, remove_update_signal)
        await device_trigger.arm_tasmota_trigger()
        return

    remove_update_signal = async_dispatcher_connect(
        hass, TASMOTA_DISCOVERY_ENTITY_UPDATED.format(*discovery_hash),
        discovery_update)

    device_registry = await hass.helpers.device_registry.async_get_registry()
    device = device_registry.async_get_device(
        set(), {(CONNECTION_NETWORK_MAC, tasmota_trigger.cfg.mac)},
    )

    # No registered device for this MAC: nothing to attach the trigger to.
    if device is None:
        return

    if DEVICE_TRIGGERS not in hass.data:
        hass.data[DEVICE_TRIGGERS] = {}
    if discovery_id not in hass.data[DEVICE_TRIGGERS]:
        device_trigger = Trigger(
            hass=hass,
            device_id=device.id,
            discovery_hash=discovery_hash,
            subtype=tasmota_trigger.cfg.subtype,
            tasmota_trigger=tasmota_trigger,
            type=tasmota_trigger.cfg.type,
            remove_update_signal=remove_update_signal,
        )
        hass.data[DEVICE_TRIGGERS][discovery_id] = device_trigger
    else:
        # This Tasmota trigger is wanted by device trigger(s), set them up
        device_trigger = hass.data[DEVICE_TRIGGERS][discovery_id]
        await device_trigger.set_tasmota_trigger(
            tasmota_trigger, remove_update_signal)
    await device_trigger.arm_tasmota_trigger()
async def async_added_to_hass(self):
    """Subscribe to report updates from the gateway."""
    # Keep the unsubscribe handle so updates can be disconnected later.
    unsubscribe = async_dispatcher_connect(
        self.hass, self._gateway.update_signal, self.receive_report)
    self._unsub_updates = unsubscribe
    _LOGGER.debug("Added OpenTherm Gateway sensor %s", self._friendly_name)
def __init__(self, hass, name):
    """Initialize Asterisk mailbox."""
    super().__init__(hass, name)
    # Refresh the mailbox whenever the server pushes a message update.
    async_dispatcher_connect(
        self.hass, SIGNAL_MESSAGE_UPDATE, self._update_callback)
if not skip: tracked.add(entity.unique_id) known_entities.append(HuaweiLteScannerEntity(router, mac)) async_add_entities(known_entities, True) # Tell parent router to poll hosts list to gather new devices router.subscriptions[KEY_LAN_HOST_INFO].add(_DEVICE_SCAN) router.subscriptions[KEY_WLAN_HOST_LIST].add(_DEVICE_SCAN) async def _async_maybe_add_new_entities(url: str) -> None: """Add new entities if the update signal comes from our router.""" if url == router.url: async_add_new_entities(hass, url, async_add_entities, tracked) # Register to handle router data updates disconnect_dispatcher = async_dispatcher_connect( hass, UPDATE_SIGNAL, _async_maybe_add_new_entities) config_entry.async_on_unload(disconnect_dispatcher) # Add new entities from initial scan async_add_new_entities(hass, router.url, async_add_entities, tracked) def _is_wireless(host: _HostType) -> bool: # LAN host info entries have an "InterfaceType" property, "Ethernet" / "Wireless". # WLAN host list ones don't, but they're expected to be all wireless. return cast(str, host.get("InterfaceType", "Wireless")) != "Ethernet" def _is_connected(host: _HostType | None) -> bool: # LAN host info entries have an "Active" property, "1" or "0". # WLAN host list ones don't, but that call appears to return active hosts only.
async def async_added_to_hass(self):
    """Handle entity which will be added."""
    # Keep the unsubscribe handle for later teardown.
    unsubscribe = async_dispatcher_connect(
        self._hass, DATA_UPDATED, self._schedule_immediate_update)
    self._unsub_update = unsubscribe
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Sonos from a config entry.

    Starts player discovery (from configured hosts or network scan) in an
    executor thread and registers the dispatcher-based service handler.
    """
    if DATA_SONOS not in hass.data:
        hass.data[DATA_SONOS] = SonosData(hass)

    config = hass.data[SONOS_DOMAIN].get("media_player", {})
    _LOGGER.debug("Reached async_setup_entry, config=%s", config)

    advertise_addr = config.get(CONF_ADVERTISE_ADDR)
    if advertise_addr:
        pysonos.config.EVENT_ADVERTISE_IP = advertise_addr

    def _discovery(now=None):
        """Discover players from network or configuration.

        Runs in an executor thread, hence hass.add_job for callbacks back
        into the event loop.
        """
        hosts = config.get(CONF_HOSTS)

        def _discovered_player(soco):
            """Handle a (re)discovered player."""
            try:
                _LOGGER.debug("Reached _discovered_player, soco=%s", soco)
                if soco not in hass.data[DATA_SONOS].discovered:
                    _LOGGER.debug("Adding new entity")
                    hass.data[DATA_SONOS].discovered.append(soco)
                    hass.add_job(async_add_entities, [SonosEntity(soco)])
                else:
                    entity = _get_entity_from_soco_uid(hass, soco.uid)
                    if entity:
                        _LOGGER.debug("Seen %s", entity)
                        hass.add_job(entity.async_seen())
            except SoCoException as ex:
                _LOGGER.debug("SoCoException, ex=%s", ex)

        if hosts:
            for host in hosts:
                try:
                    _LOGGER.debug("Testing %s", host)
                    player = pysonos.SoCo(socket.gethostbyname(host))
                    if player.is_visible:
                        # Make sure that the player is available
                        _ = player.volume
                        _discovered_player(player)
                except (OSError, SoCoException) as ex:
                    _LOGGER.debug("Exception %s", ex)
                    # Only warn on the initial (non-rescheduled) run.
                    if now is None:
                        _LOGGER.warning("Failed to initialize '%s'", host)

            _LOGGER.debug("Tested all hosts")
            # Re-test the configured hosts periodically.
            hass.helpers.event.call_later(DISCOVERY_INTERVAL, _discovery)
        else:
            _LOGGER.debug("Starting discovery thread")
            pysonos.discover_thread(
                _discovered_player,
                interval=DISCOVERY_INTERVAL,
                interface_addr=config.get(CONF_INTERFACE_ADDR),
            )

    _LOGGER.debug("Adding discovery job")
    hass.async_add_executor_job(_discovery)

    async def async_service_handle(service, data):
        """Handle dispatched services."""
        entity_ids = data.get("entity_id")
        entities = hass.data[DATA_SONOS].entities
        if entity_ids and entity_ids != ENTITY_MATCH_ALL:
            entities = [e for e in entities if e.entity_id in entity_ids]

        if service == SERVICE_JOIN:
            master = [
                e for e in hass.data[DATA_SONOS].entities
                if e.entity_id == data[ATTR_MASTER]
            ]
            if master:
                await SonosEntity.join_multi(hass, master[0], entities)
        elif service == SERVICE_UNJOIN:
            await SonosEntity.unjoin_multi(hass, entities)
        elif service == SERVICE_SNAPSHOT:
            await SonosEntity.snapshot_multi(
                hass, entities, data[ATTR_WITH_GROUP])
        elif service == SERVICE_RESTORE:
            await SonosEntity.restore_multi(
                hass, entities, data[ATTR_WITH_GROUP])
        else:
            # Remaining services map 1:1 to a blocking entity method.
            for entity in entities:
                if service == SERVICE_SET_TIMER:
                    call = entity.set_sleep_timer
                elif service == SERVICE_CLEAR_TIMER:
                    call = entity.clear_sleep_timer
                elif service == SERVICE_UPDATE_ALARM:
                    call = entity.set_alarm
                elif service == SERVICE_SET_OPTION:
                    call = entity.set_option
                elif service == SERVICE_PLAY_QUEUE:
                    call = entity.play_queue

                hass.async_add_executor_job(call, data)

        # We are ready for the next service call
        hass.data[DATA_SERVICE_EVENT].set()

    async_dispatcher_connect(hass, SONOS_DOMAIN, async_service_handle)
async def async_added_to_hass(self) -> None:
    """Register callbacks and refresh the current state."""
    unsubscribe = async_dispatcher_connect(
        self.hass, TOPIC_UPDATE, self._async_update)
    self.async_on_remove(unsubscribe)
    self._async_update()
async def async_added_to_hass(self):
    """Register callbacks.

    Subscribes to tag info updates keyed by tag id and manager MAC.
    """
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass,
        SIGNAL_TAG_UPDATE.format(self.tag_id, self.tag_manager_mac),
        self._update_tag_info_callback))
async def async_added_to_hass(self) -> None:
    """Subscribe to discovery updates."""
    await super().async_added_to_hass()
    self._removed_from_hass = False
    discovery_hash = (self._discovery_data[ATTR_DISCOVERY_HASH]
                      if self._discovery_data else None)

    # NOTE: this nested helper takes the entity as an explicit argument
    # (shadowing the method's self); it is invoked below as
    # _async_remove_state_and_registry_entry(self).
    async def _async_remove_state_and_registry_entry(self) -> None:
        """Remove entity's state and entity registry entry.

        Remove entity from entity registry if it is registered, this also
        removes the state. If the entity is not in the entity registry,
        just remove the state.
        """
        entity_registry = (
            await self.hass.helpers.entity_registry.async_get_registry())
        if entity_registry.async_is_registered(self.entity_id):
            entity_entry = entity_registry.async_get(self.entity_id)
            entity_registry.async_remove(self.entity_id)
            await cleanup_device_registry(self.hass, entity_entry.device_id)
        else:
            await self.async_remove(force_remove=True)

    async def discovery_callback(payload):
        """Handle discovery update."""
        _LOGGER.info(
            "Got update for entity with hash: %s '%s'",
            discovery_hash,
            payload,
        )
        old_payload = self._discovery_data[ATTR_DISCOVERY_PAYLOAD]
        debug_info.update_entity_discovery_data(self.hass, payload,
                                                self.entity_id)
        if not payload:
            # Empty payload: Remove component
            _LOGGER.info("Removing component: %s", self.entity_id)
            self._cleanup_discovery_on_remove()
            await _async_remove_state_and_registry_entry(self)
        elif self._discovery_update:
            if old_payload != self._discovery_data[ATTR_DISCOVERY_PAYLOAD]:
                # Non-empty, changed payload: Notify component
                _LOGGER.info("Updating component: %s", self.entity_id)
                await self._discovery_update(payload)
            else:
                # Non-empty, unchanged payload: Ignore to avoid changing
                # states
                _LOGGER.info("Ignoring unchanged update for: %s",
                             self.entity_id)
        # Signal that this discovery round has been fully processed.
        async_dispatcher_send(
            self.hass, MQTT_DISCOVERY_DONE.format(discovery_hash), None)

    if discovery_hash:
        debug_info.add_entity_discovery_data(self.hass,
                                             self._discovery_data,
                                             self.entity_id)
        # Set in case the entity has been removed and is re-added, for
        # example when changing entity_id
        set_discovery_hash(self.hass, discovery_hash)
        self._remove_signal = async_dispatcher_connect(
            self.hass,
            MQTT_DISCOVERY_UPDATED.format(discovery_hash),
            discovery_callback,
        )
        async_dispatcher_send(
            self.hass, MQTT_DISCOVERY_DONE.format(discovery_hash), None)
def async_added_to_hass(self):
    """Register callbacks."""
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(async_dispatcher_connect(
        self.hass, SIGNAL_UPDATE_RAINCLOUD, self._update_callback))
async def async_added_to_hass(self):
    """Register state update callback."""
    # Keep the unsubscribe handle so the listener can be removed later.
    unsubscribe = async_dispatcher_connect(
        self.hass, self._account.signal_device_update,
        self.async_write_ha_state)
    self._unsub_dispatcher = unsubscribe
async def async_added_to_hass(self) -> None:
    """Connect to update signals."""
    # Track the unsubscribe handle alongside the other handlers.
    unsubscribe = async_dispatcher_connect(
        self.hass, UPDATE_SIGNAL, self._async_maybe_update)
    self._unsub_handlers.append(unsubscribe)
async def async_added_to_hass(self) -> None:
    """Run when entity about to be added to hass."""
    # async_on_remove releases the subscription on entity removal.
    unsubscribe = async_dispatcher_connect(
        self.hass, SIGNAL_SWITCHER_DEVICE_UPDATE, self.async_update_data)
    self.async_on_remove(unsubscribe)
device = hass_data.device_manager.device_map[device_id] if descriptions := BINARY_SENSORS.get(device.category): for description in descriptions: dpcode = description.dpcode or description.key if dpcode in device.status: entities.append( TuyaBinarySensorEntity(device, hass_data.device_manager, description)) async_add_entities(entities) async_discover_device([*hass_data.device_manager.device_map]) entry.async_on_unload( async_dispatcher_connect(hass, TUYA_DISCOVERY_NEW, async_discover_device)) class TuyaBinarySensorEntity(TuyaEntity, BinarySensorEntity): """Tuya Binary Sensor Entity.""" entity_description: TuyaBinarySensorEntityDescription def __init__( self, device: TuyaDevice, device_manager: TuyaDeviceManager, description: TuyaBinarySensorEntityDescription, ) -> None: """Init Tuya binary sensor.""" super().__init__(device, device_manager)
async def async_added_to_hass(self) -> None:
    """Run when entity about to be added to hass."""
    # Register the unsubscribe callback so the dispatcher listener is
    # released when the entity is removed (previously it was discarded).
    self.async_on_remove(
        async_dispatcher_connect(self.hass, DOMAIN, self._refresh))
async def async_added_to_hass(self):
    """Call when entity is added to Home Assistant."""
    # (Fixed docstring typo: "HOme" -> "Home".)
    await super().async_added_to_hass()
    # Keep the unsubscribe handle so the webhook listener can be
    # disconnected when the entity is removed.
    self._async_unsub_hook_dispatcher_connect = async_dispatcher_connect(
        self.hass, SIGNAL_WEBHOOK, self._webhook_event
    )
async def async_added_to_hass(self):
    """Call when entity is added to hass."""
    _LOGGER.debug("Created device %s", self)
    # Keep the unsubscribe handle so the listener can be removed later.
    unsubscribe = async_dispatcher_connect(
        self.hass, SIGNAL_UPDATE_ENTITY, self._update_callback)
    self._async_unsub_dispatcher_connect = unsubscribe
    await self._update_callback()
async def async_setup_entry(hass, entry):
    """Set up Plex from a config entry.

    Connects to the configured Plex server, registers dispatcher and
    websocket plumbing, forwards platform setup, and (when a Plex account
    is available) registers the play-on-Sonos service. Returns False on
    auth/login failure, True on success; raises ConfigEntryNotReady when
    the server is unreachable so Home Assistant retries.
    """
    server_config = entry.data[PLEX_SERVER_CONFIG]
    # Backfill a unique_id for entries created before unique IDs existed.
    if entry.unique_id is None:
        hass.config_entries.async_update_entry(
            entry, unique_id=entry.data[CONF_SERVER_IDENTIFIER])
    # Ensure a media_player options section exists for later option lookups.
    if MP_DOMAIN not in entry.options:
        options = dict(entry.options)
        options.setdefault(MP_DOMAIN, {})
        hass.config_entries.async_update_entry(entry, options=options)
    plex_server = PlexServer(
        hass,
        server_config,
        entry.data[CONF_SERVER_IDENTIFIER],
        entry.options,
        entry.entry_id,
    )
    try:
        # connect() is blocking; run it in the executor.
        await hass.async_add_executor_job(plex_server.connect)
    except ShouldUpdateConfigEntry:
        # Server reported a preferred URL/name; persist it and continue.
        new_server_data = {
            **entry.data[PLEX_SERVER_CONFIG],
            CONF_URL: plex_server.url_in_use,
            CONF_SERVER: plex_server.friendly_name,
        }
        hass.config_entries.async_update_entry(
            entry, data={**entry.data, PLEX_SERVER_CONFIG: new_server_data})
    except requests.exceptions.ConnectionError as error:
        # Only log on the first failure; retries stay quiet.
        if entry.state != ENTRY_STATE_SETUP_RETRY:
            _LOGGER.error(
                "Plex server (%s) could not be reached: [%s]",
                server_config[CONF_URL],
                error,
            )
        raise ConfigEntryNotReady from error
    except plexapi.exceptions.Unauthorized:
        # Token rejected: kick off a reauth flow and abort setup.
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                PLEX_DOMAIN,
                context={CONF_SOURCE: SOURCE_REAUTH},
                data=entry.data,
            ))
        _LOGGER.error(
            "Token not accepted, please reauthenticate Plex server '%s'",
            entry.data[CONF_SERVER],
        )
        return False
    except (
        plexapi.exceptions.BadRequest,
        plexapi.exceptions.NotFound,
    ) as error:
        _LOGGER.error(
            "Login to %s failed, verify token and SSL settings: [%s]",
            entry.data[CONF_SERVER],
            error,
        )
        return False
    _LOGGER.debug("Connected to: %s (%s)", plex_server.friendly_name,
                  plex_server.url_in_use)
    server_id = plex_server.machine_identifier
    hass.data[PLEX_DOMAIN][SERVERS][server_id] = plex_server
    hass.data[PLEX_DOMAIN][PLATFORMS_COMPLETED][server_id] = set()
    entry.add_update_listener(async_options_updated)
    # Listen for per-server platform-update requests; unsub handles are
    # collected under DISPATCHERS for cleanup on unload.
    unsub = async_dispatcher_connect(
        hass,
        PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id),
        plex_server.async_update_platforms,
    )
    hass.data[PLEX_DOMAIN][DISPATCHERS].setdefault(server_id, [])
    hass.data[PLEX_DOMAIN][DISPATCHERS][server_id].append(unsub)

    @callback
    def plex_websocket_callback(signal, data, error):
        """Handle callbacks from plexwebsocket library."""
        if signal == SIGNAL_CONNECTION_STATE:
            if data == STATE_CONNECTED:
                _LOGGER.debug(
                    "Websocket to %s successful", entry.data[CONF_SERVER])
            elif data == STATE_DISCONNECTED:
                _LOGGER.debug(
                    "Websocket to %s disconnected, retrying",
                    entry.data[CONF_SERVER])
            # Stopped websockets without errors are expected during shutdown and ignored
            elif data == STATE_STOPPED and error:
                _LOGGER.error(
                    "Websocket to %s failed, aborting [Error: %s]",
                    entry.data[CONF_SERVER],
                    error,
                )
                # Unexpected stop: reload the whole config entry.
                hass.async_create_task(
                    hass.config_entries.async_reload(entry.entry_id))
        elif signal == SIGNAL_DATA:
            # Server pushed new data; fan it out to the platforms.
            async_dispatcher_send(
                hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))

    session = async_get_clientsession(hass)
    verify_ssl = server_config.get(CONF_VERIFY_SSL)
    websocket = PlexWebsocket(
        plex_server.plex_server,
        plex_websocket_callback,
        session=session,
        verify_ssl=verify_ssl,
    )
    hass.data[PLEX_DOMAIN][WEBSOCKETS][server_id] = websocket

    def start_websocket_session(platform, _):
        # Start listening only once every platform has finished setup.
        hass.data[PLEX_DOMAIN][PLATFORMS_COMPLETED][server_id].add(platform)
        if hass.data[PLEX_DOMAIN][PLATFORMS_COMPLETED][server_id] == PLATFORMS:
            hass.loop.create_task(websocket.listen())

    def close_websocket_session(_):
        websocket.close()

    # Close the websocket on shutdown; reuse the DISPATCHERS list for the
    # unsubscribe handle so unload cleans it up too.
    unsub = hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP,
                                       close_websocket_session)
    hass.data[PLEX_DOMAIN][DISPATCHERS][server_id].append(unsub)

    for platform in PLATFORMS:
        task = hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, platform))
        task.add_done_callback(
            functools.partial(start_websocket_session, platform))

    async def async_play_on_sonos_service(service_call):
        # play_on_sonos is blocking; run it off the event loop.
        await hass.async_add_executor_job(play_on_sonos, hass, service_call)

    # NOTE(review): vol.In("music") validates membership in the *string*
    # "music", so substrings like "usi" would pass; a one-element list
    # (vol.In(["music"])) is presumably intended — confirm before changing.
    play_on_sonos_schema = vol.Schema({
        vol.Required(ATTR_ENTITY_ID): cv.entity_id,
        vol.Required(ATTR_MEDIA_CONTENT_ID): str,
        vol.Optional(ATTR_MEDIA_CONTENT_TYPE): vol.In("music"),
    })

    def get_plex_account(plex_server):
        # Best-effort: account lookup may fail on limited tokens.
        try:
            return plex_server.account
        except (plexapi.exceptions.BadRequest,
                plexapi.exceptions.Unauthorized):
            return None

    plex_account = await hass.async_add_executor_job(
        get_plex_account, plex_server)
    # Only register the Sonos service when an account is available.
    if plex_account:
        hass.services.async_register(
            PLEX_DOMAIN,
            SERVICE_PLAY_ON_SONOS,
            async_play_on_sonos_service,
            schema=play_on_sonos_schema,
        )
    return True
async def async_added_to_hass(self):
    """Register callbacks."""
    unsubscribe = async_dispatcher_connect(
        self.hass,
        UPDATE_TOPIC,
        self.async_update_callback,
    )
    self.async_on_remove(unsubscribe)