async def async_setup(hass, config): """Set up the asuswrt component.""" from aioasuswrt.asuswrt import AsusWrt conf = config[DOMAIN] api = AsusWrt(conf[CONF_HOST], conf.get(CONF_PORT), conf.get(CONF_PROTOCOL) == 'telnet', conf[CONF_USERNAME], conf.get(CONF_PASSWORD, ''), conf.get('ssh_key', conf.get('pub_key', '')), conf.get(CONF_MODE), conf.get(CONF_REQUIRE_IP)) await api.connection.async_connect() if not api.is_connected: _LOGGER.error("Unable to setup asuswrt component") return False hass.data[DATA_ASUSWRT] = api hass.async_create_task(async_load_platform( hass, 'sensor', DOMAIN, config[DOMAIN].get(CONF_SENSORS), config)) hass.async_create_task(async_load_platform( hass, 'device_tracker', DOMAIN, {}, config)) return True
def _setup_atv(hass, atv_config):
    """Set up an Apple TV."""
    import pyatv
    name = atv_config.get(CONF_NAME)
    host = atv_config.get(CONF_HOST)
    login_id = atv_config.get(CONF_LOGIN_ID)
    start_off = atv_config.get(CONF_START_OFF)
    credentials = atv_config.get(CONF_CREDENTIALS)

    if host in hass.data[DATA_APPLE_TV]:
        return

    details = pyatv.AppleTVDevice(name, host, login_id)
    session = async_get_clientsession(hass)
    atv = pyatv.connect_to_apple_tv(details, hass.loop, session=session)
    if credentials:
        yield from atv.airplay.load_credentials(credentials)

    power = AppleTVPowerManager(hass, atv, start_off)
    hass.data[DATA_APPLE_TV][host] = {
        ATTR_ATV: atv,
        ATTR_POWER: power
    }

    hass.async_add_job(discovery.async_load_platform(
        hass, 'media_player', DOMAIN, atv_config))

    hass.async_add_job(discovery.async_load_platform(
        hass, 'remote', DOMAIN, atv_config))
def async_setup(hass, config): """Set up the SPC platform.""" hass.data[DATA_REGISTRY] = SpcRegistry() api = SpcWebGateway(hass, config[DOMAIN].get(CONF_API_URL), config[DOMAIN].get(CONF_WS_URL)) hass.data[DATA_API] = api # add sensor devices for each zone (typically motion/fire/door sensors) zones = yield from api.get_zones() if zones: hass.async_add_job(discovery.async_load_platform( hass, 'binary_sensor', DOMAIN, {ATTR_DISCOVER_DEVICES: zones}, config)) # create a separate alarm panel for each area areas = yield from api.get_areas() if areas: hass.async_add_job(discovery.async_load_platform( hass, 'alarm_control_panel', DOMAIN, {ATTR_DISCOVER_AREAS: areas}, config)) # start listening for incoming events over websocket api.start_listener(_async_process_message, hass.data[DATA_REGISTRY]) return True
async def async_setup(hass, hass_config): """Create a Genius Hub system.""" from geniushubclient import GeniusHubClient # noqa; pylint: disable=no-name-in-module geniushub_data = hass.data[DOMAIN] = {} kwargs = dict(hass_config[DOMAIN]) if CONF_HOST in kwargs: args = (kwargs.pop(CONF_HOST), ) else: args = (kwargs.pop(CONF_TOKEN), ) try: client = geniushub_data['client'] = GeniusHubClient( *args, **kwargs, session=async_get_clientsession(hass) ) await client.hub.update() except AssertionError: # assert response.status == HTTP_OK _LOGGER.warning( "setup(): Failed, check your configuration.", exc_info=True) return False hass.async_create_task(async_load_platform( hass, 'climate', DOMAIN, {}, hass_config)) hass.async_create_task(async_load_platform( hass, 'water_heater', DOMAIN, {}, hass_config)) return True
async def async_setup(hass, config): """Set up the Ness Alarm platform.""" from nessclient import Client, ArmingState conf = config[DOMAIN] zones = conf[CONF_ZONES] host = conf[CONF_HOST] port = conf[CONF_DEVICE_PORT] scan_interval = conf[CONF_SCAN_INTERVAL] infer_arming_state = conf[CONF_INFER_ARMING_STATE] client = Client(host=host, port=port, loop=hass.loop, update_interval=scan_interval.total_seconds(), infer_arming_state=infer_arming_state) hass.data[DATA_NESS] = client async def _close(event): await client.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close) hass.async_create_task( async_load_platform(hass, 'binary_sensor', DOMAIN, {CONF_ZONES: zones}, config)) hass.async_create_task( async_load_platform(hass, 'alarm_control_panel', DOMAIN, {}, config)) def on_zone_change(zone_id: int, state: bool): """Receives and propagates zone state updates.""" async_dispatcher_send(hass, SIGNAL_ZONE_CHANGED, ZoneChangedData( zone_id=zone_id, state=state, )) def on_state_change(arming_state: ArmingState): """Receives and propagates arming state updates.""" async_dispatcher_send(hass, SIGNAL_ARMING_STATE_CHANGED, arming_state) client.on_zone_change(on_zone_change) client.on_state_change(on_state_change) # Force update for current arming status and current zone states hass.loop.create_task(client.keepalive()) hass.loop.create_task(client.update()) async def handle_panic(call): await client.panic(call.data[ATTR_CODE]) async def handle_aux(call): await client.aux(call.data[ATTR_OUTPUT_ID], call.data[ATTR_STATE]) hass.services.async_register(DOMAIN, SERVICE_PANIC, handle_panic, schema=SERVICE_SCHEMA_PANIC) hass.services.async_register(DOMAIN, SERVICE_AUX, handle_aux, schema=SERVICE_SCHEMA_AUX) return True
async def async_setup(hass, config): """Set up an Utility Meter.""" component = EntityComponent(_LOGGER, DOMAIN, hass) hass.data[DATA_UTILITY] = {} register_services = False for meter, conf in config.get(DOMAIN).items(): _LOGGER.debug("Setup %s.%s", DOMAIN, meter) hass.data[DATA_UTILITY][meter] = conf if not conf[CONF_TARIFFS]: # only one entity is required hass.async_create_task(discovery.async_load_platform( hass, SENSOR_DOMAIN, DOMAIN, [{CONF_METER: meter, CONF_NAME: meter}], config)) else: # create tariff selection await component.async_add_entities([ TariffSelect(meter, list(conf[CONF_TARIFFS])) ]) hass.data[DATA_UTILITY][meter][CONF_TARIFF_ENTITY] =\ "{}.{}".format(DOMAIN, meter) # add one meter for each tariff tariff_confs = [] for tariff in conf[CONF_TARIFFS]: tariff_confs.append({ CONF_METER: meter, CONF_NAME: "{} {}".format(meter, tariff), CONF_TARIFF: tariff, }) hass.async_create_task(discovery.async_load_platform( hass, SENSOR_DOMAIN, DOMAIN, tariff_confs, config)) register_services = True if register_services: component.async_register_entity_service( SERVICE_RESET, SERVICE_METER_SCHEMA, 'async_reset_meters' ) component.async_register_entity_service( SERVICE_SELECT_TARIFF, SERVICE_SELECT_TARIFF_SCHEMA, 'async_select_tariff' ) component.async_register_entity_service( SERVICE_SELECT_NEXT_TARIFF, SERVICE_METER_SCHEMA, 'async_next_tariff' ) return True
async def async_setup(hass, config): """Set up Netgear LTE component.""" if DATA_KEY not in hass.data: websession = async_create_clientsession( hass, cookie_jar=aiohttp.CookieJar(unsafe=True)) hass.data[DATA_KEY] = LTEData(websession) async def delete_sms_handler(service): """Apply a service.""" host = service.data[ATTR_HOST] conf = {CONF_HOST: host} modem_data = hass.data[DATA_KEY].get_modem_data(conf) if not modem_data: _LOGGER.error( "%s: host %s unavailable", SERVICE_DELETE_SMS, host) return for sms_id in service.data[ATTR_SMS_ID]: await modem_data.modem.delete_sms(sms_id) hass.services.async_register( DOMAIN, SERVICE_DELETE_SMS, delete_sms_handler, schema=DELETE_SMS_SCHEMA) netgear_lte_config = config[DOMAIN] # Set up each modem tasks = [_setup_lte(hass, lte_conf) for lte_conf in netgear_lte_config] await asyncio.wait(tasks) # Load platforms for each modem for lte_conf in netgear_lte_config: # Notify for notify_conf in lte_conf[NOTIFY_DOMAIN]: discovery_info = { CONF_HOST: lte_conf[CONF_HOST], CONF_NAME: notify_conf.get(CONF_NAME), NOTIFY_DOMAIN: notify_conf, } hass.async_create_task(discovery.async_load_platform( hass, NOTIFY_DOMAIN, DOMAIN, discovery_info, config)) # Sensor sensor_conf = lte_conf.get(SENSOR_DOMAIN) discovery_info = { CONF_HOST: lte_conf[CONF_HOST], SENSOR_DOMAIN: sensor_conf, } hass.async_create_task(discovery.async_load_platform( hass, SENSOR_DOMAIN, DOMAIN, discovery_info, config)) return True
def async_setup(hass, base_config):
    """Set up the Lutron component."""
    from pylutron_caseta.smartbridge import Smartbridge

    config = base_config.get(DOMAIN)
    keyfile = hass.config.path(config[CONF_KEYFILE])
    certfile = hass.config.path(config[CONF_CERTFILE])
    ca_certs = hass.config.path(config[CONF_CA_CERTS])
    bridge = Smartbridge.create_tls(hostname=config[CONF_HOST],
                                    keyfile=keyfile,
                                    certfile=certfile,
                                    ca_certs=ca_certs)
    hass.data[LUTRON_CASETA_SMARTBRIDGE] = bridge
    yield from bridge.connect()
    if not hass.data[LUTRON_CASETA_SMARTBRIDGE].is_connected():
        _LOGGER.error("Unable to connect to Lutron smartbridge at %s",
                      config[CONF_HOST])
        return False

    _LOGGER.info("Connected to Lutron smartbridge at %s", config[CONF_HOST])

    for component in LUTRON_CASETA_COMPONENTS:
        hass.async_add_job(discovery.async_load_platform(
            hass, component, DOMAIN, {}, config))

    return True
async def async_setup(hass, config): """Set up the Speedtest.net component.""" conf = config[DOMAIN] data = hass.data[DOMAIN] = SpeedtestData(hass, conf.get(CONF_SERVER_ID)) if not conf[CONF_MANUAL]: async_track_time_interval( hass, data.update, conf[CONF_SCAN_INTERVAL] ) def update(call=None): """Service call to manually update the data.""" data.update() hass.services.async_register(DOMAIN, 'speedtest', update) hass.async_create_task( async_load_platform( hass, SENSOR_DOMAIN, DOMAIN, conf[CONF_MONITORED_CONDITIONS], config ) ) return True
async def test_setup_platform_discover_client(hass): """Test setting up the platform from discovery.""" LOCATIONS.append({ 'locationName': 'Client 1', 'clientAddr': '1' }) LOCATIONS.append({ 'locationName': 'Client 2', 'clientAddr': '2' }) with MockDependency('DirectPy'), \ patch('DirectPy.DIRECTV', new=MockDirectvClass): await async_setup_component(hass, mp.DOMAIN, WORKING_CONFIG) await hass.async_block_till_done() hass.async_create_task( async_load_platform(hass, mp.DOMAIN, 'directv', DISCOVERY_INFO, {'media_player': {}}) ) await hass.async_block_till_done() del LOCATIONS[-1] del LOCATIONS[-1] state = hass.states.get(MAIN_ENTITY_ID) assert state state = hass.states.get('media_player.client_1') assert state state = hass.states.get('media_player.client_2') assert state assert len(hass.states.async_entity_ids('media_player')) == 3
async def async_setup(hass, hass_config): """Create an Intergas InComfort/Intouch system.""" incomfort_data = hass.data[DOMAIN] = {} credentials = dict(hass_config[DOMAIN]) hostname = credentials.pop(CONF_HOST) try: client = incomfort_data['client'] = InComfortGateway( hostname, **credentials, session=async_get_clientsession(hass) ) heater = incomfort_data['heater'] = list(await client.heaters)[0] await heater.update() except AssertionError: # assert response.status == HTTP_OK _LOGGER.warning( "Setup failed, check your configuration.", exc_info=True) return False hass.async_create_task(async_load_platform( hass, 'water_heater', DOMAIN, {}, hass_config)) return True
def new_service_found(service, info):
    """Handle a new service if one is found."""
    if service in ignored_platforms:
        logger.info("Ignoring service: %s %s", service, info)
        return

    comp_plat = SERVICE_HANDLERS.get(service)

    # We do not know how to handle this service.
    if not comp_plat:
        return

    discovery_hash = json.dumps([service, info], sort_keys=True)
    if discovery_hash in already_discovered:
        return

    already_discovered.add(discovery_hash)

    logger.info("Found new service: %s %s", service, info)

    component, platform = comp_plat

    if platform is None:
        yield from async_discover(hass, service, info, component, config)
    else:
        yield from async_load_platform(
            hass, component, platform, info, config)
async def async_setup(hass, config): """Set up the Melissa Climate component.""" import melissa conf = config[DOMAIN] username = conf.get(CONF_USERNAME) password = conf.get(CONF_PASSWORD) api = melissa.AsyncMelissa(username=username, password=password) await api.async_connect() hass.data[DATA_MELISSA] = api hass.async_create_task( async_load_platform(hass, 'sensor', DOMAIN, {}, config)) hass.async_create_task( async_load_platform(hass, 'climate', DOMAIN, {}, config)) return True
def async_setup(hass, config): """Set up the KNX component.""" from xknx.exceptions import XKNXException try: hass.data[DATA_KNX] = KNXModule(hass, config) yield from hass.data[DATA_KNX].start() except XKNXException as ex: _LOGGER.warning("Can't connect to KNX interface: %s", ex) hass.components.persistent_notification.async_create( "Can't connect to KNX interface: <br>" "<b>{0}</b>".format(ex), title="KNX") for component, discovery_type in ( ('switch', 'Switch'), ('climate', 'Climate'), ('cover', 'Cover'), ('light', 'Light'), ('sensor', 'Sensor'), ('binary_sensor', 'BinarySensor'), ('notify', 'Notification')): found_devices = _get_devices(hass, discovery_type) hass.async_add_job( discovery.async_load_platform(hass, component, DOMAIN, { ATTR_DISCOVER_DEVICES: found_devices }, config)) hass.services.async_register( DOMAIN, SERVICE_KNX_SEND, hass.data[DATA_KNX].service_send_to_knx_bus, schema=SERVICE_KNX_SEND_SCHEMA) return True
async def async_setup(hass, config): """Set up the IQVIA component.""" hass.data[DOMAIN] = {} hass.data[DOMAIN][DATA_CLIENT] = {} hass.data[DOMAIN][DATA_LISTENER] = {} conf = config[DOMAIN] websession = aiohttp_client.async_get_clientsession(hass) try: iqvia = IQVIAData( Client(conf[CONF_ZIP_CODE], websession), conf[CONF_MONITORED_CONDITIONS]) await iqvia.async_update() except IQVIAError as err: _LOGGER.error('Unable to set up IQVIA: %s', err) return False hass.data[DOMAIN][DATA_CLIENT] = iqvia hass.async_create_task( async_load_platform(hass, 'sensor', DOMAIN, {}, config)) async def refresh(event_time): """Refresh IQVIA data.""" _LOGGER.debug('Updating IQVIA data') await iqvia.async_update() async_dispatcher_send(hass, TOPIC_DATA_UPDATE) hass.data[DOMAIN][DATA_LISTENER] = async_track_time_interval( hass, refresh, DEFAULT_SCAN_INTERVAL) return True
def _setup_gateway(hass, hass_config, host, key, allow_tradfri_groups): """Create a gateway.""" from pytradfri import Gateway, RequestError from pytradfri.api.libcoap_api import api_factory try: api = api_factory(host, key) except RequestError: return False gateway = Gateway() # pylint: disable=no-member gateway_id = api(gateway.get_gateway_info()).id hass.data.setdefault(KEY_API, {}) hass.data.setdefault(KEY_GATEWAY, {}) gateways = hass.data[KEY_GATEWAY] hass.data[KEY_API][gateway_id] = api hass.data.setdefault(KEY_TRADFRI_GROUPS, {}) tradfri_groups = hass.data[KEY_TRADFRI_GROUPS] tradfri_groups[gateway_id] = allow_tradfri_groups # Check if already set up if gateway_id in gateways: return True gateways[gateway_id] = gateway hass.async_add_job(discovery.async_load_platform( hass, 'light', DOMAIN, {'gateway': gateway_id}, hass_config)) return True
def async_device_message_received(topic, payload, qos): """Process the received message.""" match = TOPIC_MATCHER.match(topic) if not match: return prefix_topic, component, object_id = match.groups() try: payload = json.loads(payload) except ValueError: _LOGGER.warning("Unable to parse JSON %s: %s", object_id, payload) return if component not in SUPPORTED_COMPONENTS: _LOGGER.warning("Component %s is not supported", component) return payload = dict(payload) platform = payload.get(CONF_PLATFORM, 'mqtt') if platform not in ALLOWED_PLATFORMS.get(component, []): _LOGGER.warning("Platform %s (component %s) is not allowed", platform, component) return payload[CONF_PLATFORM] = platform if CONF_STATE_TOPIC not in payload: payload[CONF_STATE_TOPIC] = '{}/{}/{}/state'.format( discovery_topic, component, object_id) yield from async_load_platform( hass, component, platform, payload, hass_config)
def discover_vehicle(vehicle):
    """Load relevant platforms."""
    data.vehicles.add(vehicle.vin)

    dashboard = vehicle.dashboard(
        mutable=config[DOMAIN][CONF_MUTABLE],
        scandinavian_miles=config[DOMAIN][CONF_SCANDINAVIAN_MILES])

    for instrument in (
            instrument
            for instrument in dashboard.instruments
            if instrument.component in COMPONENTS
            and is_enabled(instrument.slug_attr)):

        data.instruments.add(instrument)

        hass.async_create_task(
            discovery.async_load_platform(
                hass,
                COMPONENTS[instrument.component],
                DOMAIN,
                (vehicle.vin, instrument.component, instrument.attr),
                config))
async def connect():
    """Set up connection and hook it into HA for reconnect/shutdown."""
    _LOGGER.info('Initiating HLK-SW16 connection to %s', device)

    client = await create_hlk_sw16_connection(
        host=host,
        port=port,
        disconnect_callback=disconnected,
        reconnect_callback=reconnected,
        loop=hass.loop,
        timeout=CONNECTION_TIMEOUT,
        reconnect_interval=DEFAULT_RECONNECT_INTERVAL)

    hass.data[DATA_DEVICE_REGISTER][device] = client

    # Load platforms
    hass.async_create_task(
        async_load_platform(hass, 'switch', DOMAIN,
                            (switches, device), config))

    # handle shutdown of HLK-SW16 asyncio transport
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP,
                               lambda x: client.stop())

    _LOGGER.info('Connected to HLK-SW16 device: %s', device)
async def async_setup(hass, config): """Set up the Google Home component.""" hass.data[DOMAIN] = {} hass.data[CLIENT] = GoogleHomeClient(hass) for device in config[DOMAIN][CONF_DEVICES]: hass.data[DOMAIN][device['host']] = {} hass.async_create_task( discovery.async_load_platform( hass, 'device_tracker', DOMAIN, device, config)) if device[CONF_TRACK_ALARMS]: hass.async_create_task( discovery.async_load_platform( hass, 'sensor', DOMAIN, device, config)) return True
async def new_lightpad(device):
    """Load light and binary sensor platforms when Lightpad detected."""
    await asyncio.wait([
        hass.async_create_task(
            discovery.async_load_platform(
                hass, 'light', DOMAIN, discovered=device, hass_config=conf))
    ])
async def async_setup(self): """Set up a phue bridge based on host parameter.""" import aiohue api = aiohue.Bridge( self.host, username=self.username, websession=aiohttp_client.async_get_clientsession(self.hass) ) try: with async_timeout.timeout(5): # Initialize bridge and validate our username if not self.username: await api.create_user('home-assistant') await api.initialize() except (aiohue.LinkButtonNotPressed, aiohue.Unauthorized): _LOGGER.warning("Connected to Hue at %s but not registered.", self.host) self.async_request_configuration() return except (asyncio.TimeoutError, aiohue.RequestError): _LOGGER.error("Error connecting to the Hue bridge at %s", self.host) return except aiohue.AiohueException: _LOGGER.exception('Unknown Hue linking error occurred') self.async_request_configuration() return except Exception: # pylint: disable=broad-except _LOGGER.exception("Unknown error connecting with Hue bridge at %s", self.host) return self.hass.data[DOMAIN][self.host] = self # If we came here and configuring this host, mark as done if self.config_request_id: request_id = self.config_request_id self.config_request_id = None self.hass.components.configurator.async_request_done(request_id) self.username = api.username # Save config file await self.hass.async_add_job( save_json, self.hass.config.path(self.filename), {self.host: {'username': api.username}}) self.api = api self.hass.async_add_job(discovery.async_load_platform( self.hass, 'light', DOMAIN, {'host': self.host})) self.hass.services.async_register( DOMAIN, SERVICE_HUE_SCENE, self.hue_activate_scene, schema=SCENE_SCHEMA)
async def async_setup(hass, config): """Set up the habitica service.""" conf = config[DOMAIN] data = hass.data[DOMAIN] = {} websession = async_get_clientsession(hass) from habitipy.aio import HabitipyAsync class HAHabitipyAsync(HabitipyAsync): """Closure API class to hold session.""" def __call__(self, **kwargs): return super().__call__(websession, **kwargs) for instance in conf: url = instance[CONF_URL] username = instance[CONF_API_USER] password = instance[CONF_API_KEY] name = instance.get(CONF_NAME) config_dict = {"url": url, "login": username, "password": password} api = HAHabitipyAsync(config_dict) user = await api.user.get() if name is None: name = user['profile']['name'] data[name] = api if CONF_SENSORS in instance: hass.async_create_task( discovery.async_load_platform( hass, "sensor", DOMAIN, {"name": name, "sensors": instance[CONF_SENSORS]}, config)) async def handle_api_call(call): name = call.data[ATTR_NAME] path = call.data[ATTR_PATH] api = hass.data[DOMAIN].get(name) if api is None: _LOGGER.error( "API_CALL: User '%s' not configured", name) return try: for element in path: api = api[element] except KeyError: _LOGGER.error( "API_CALL: Path %s is invalid" " for api on '{%s}' element", path, element) return kwargs = call.data.get(ATTR_ARGS, {}) data = await api(**kwargs) hass.bus.async_fire(EVENT_API_CALL_SUCCESS, { "name": name, "path": path, "data": data }) hass.services.async_register( DOMAIN, SERVICE_API_CALL, handle_api_call, schema=SERVICE_API_CALL_SCHEMA) return True
async def async_setup(hass, config): """Set up the LCN component.""" import pypck from pypck.connection import PchkConnectionManager hass.data[DATA_LCN] = {} conf_connections = config[DOMAIN][CONF_CONNECTIONS] connections = [] for conf_connection in conf_connections: connection_name = conf_connection.get(CONF_NAME) settings = {'SK_NUM_TRIES': conf_connection[CONF_SK_NUM_TRIES], 'DIM_MODE': pypck.lcn_defs.OutputPortDimMode[ conf_connection[CONF_DIM_MODE]]} connection = PchkConnectionManager(hass.loop, conf_connection[CONF_HOST], conf_connection[CONF_PORT], conf_connection[CONF_USERNAME], conf_connection[CONF_PASSWORD], settings=settings, connection_id=connection_name) try: # establish connection to PCHK server await hass.async_create_task(connection.async_connect(timeout=15)) connections.append(connection) _LOGGER.info('LCN connected to "%s"', connection_name) except TimeoutError: _LOGGER.error('Connection to PCHK server "%s" failed.', connection_name) return False hass.data[DATA_LCN][CONF_CONNECTIONS] = connections hass.async_create_task( async_load_platform(hass, 'light', DOMAIN, config[DOMAIN][CONF_LIGHTS], config)) hass.async_create_task( async_load_platform(hass, 'switch', DOMAIN, config[DOMAIN][CONF_SWITCHES], config)) return True
async def async_setup(hass, config): """Try to start embedded Lightwave broker.""" from lightwave.lightwave import LWLink host = config[DOMAIN][CONF_HOST] hass.data[LIGHTWAVE_LINK] = LWLink(host) lights = config[DOMAIN][CONF_LIGHTS] if lights: hass.async_create_task(async_load_platform( hass, 'light', DOMAIN, lights, config)) switches = config[DOMAIN][CONF_SWITCHES] if switches: hass.async_create_task(async_load_platform( hass, 'switch', DOMAIN, switches, config)) return True
async def add_table(host, name=None):
    """Add platforms for a single table with the given hostname."""
    table = await Table.connect(host, session)
    if name is None:
        name = table.name
    tables[name] = table
    _LOGGER.debug("Connected to %s at %s", name, host)

    hass.async_create_task(async_load_platform(
        hass,
        'light',
        DOMAIN,
        {
            CONF_NAME: name,
        },
        config
    ))
    hass.async_create_task(async_load_platform(
        hass,
        'media_player',
        DOMAIN,
        {
            CONF_NAME: name,
            CONF_HOST: host,
        },
        config
    ))
async def async_setup(hass, config): """Set up the SPC component.""" from pyspcwebgw import SpcWebGateway async def async_upate_callback(spc_object): from pyspcwebgw.area import Area from pyspcwebgw.zone import Zone if isinstance(spc_object, Area): async_dispatcher_send(hass, SIGNAL_UPDATE_ALARM.format(spc_object.id)) elif isinstance(spc_object, Zone): async_dispatcher_send(hass, SIGNAL_UPDATE_SENSOR.format(spc_object.id)) session = aiohttp_client.async_get_clientsession(hass) spc = SpcWebGateway(loop=hass.loop, session=session, api_url=config[DOMAIN].get(CONF_API_URL), ws_url=config[DOMAIN].get(CONF_WS_URL), async_callback=async_upate_callback) hass.data[DATA_API] = spc if not await spc.async_load_parameters(): _LOGGER.error('Failed to load area/zone information from SPC.') return False # add sensor devices for each zone (typically motion/fire/door sensors) hass.async_create_task(discovery.async_load_platform( hass, 'binary_sensor', DOMAIN, {ATTR_DISCOVER_DEVICES: spc.zones.values()}, config)) # create a separate alarm panel for each area hass.async_create_task(discovery.async_load_platform( hass, 'alarm_control_panel', DOMAIN, {ATTR_DISCOVER_AREAS: spc.areas.values()}, config)) # start listening for incoming events over websocket spc.start() return True
async def async_setup(hass, hass_config): """Set up the Geofency component.""" config = hass_config[DOMAIN] mobile_beacons = config[CONF_MOBILE_BEACONS] hass.data[DOMAIN] = [slugify(beacon) for beacon in mobile_beacons] hass.http.register_view(GeofencyView(hass.data[DOMAIN])) hass.async_create_task( async_load_platform(hass, 'device_tracker', DOMAIN, {}, hass_config) ) return True
async def async_setup(hass, config): """Set up the Sense sensor.""" from sense_energy import ASyncSenseable, SenseAuthenticationException username = config[DOMAIN][CONF_EMAIL] password = config[DOMAIN][CONF_PASSWORD] timeout = config[DOMAIN][CONF_TIMEOUT] try: hass.data[SENSE_DATA] = ASyncSenseable( api_timeout=timeout, wss_timeout=timeout) hass.data[SENSE_DATA].rate_limit = ACTIVE_UPDATE_RATE await hass.data[SENSE_DATA].authenticate(username, password) except SenseAuthenticationException: _LOGGER.error("Could not authenticate with sense server") return False hass.async_create_task( async_load_platform(hass, 'sensor', DOMAIN, None, config)) hass.async_create_task( async_load_platform(hass, 'binary_sensor', DOMAIN, None, config)) return True
async def async_setup_entry(hass, entry): """Load a config entry. Validate and save sessions per aws credential. """ config = hass.data.get(DATA_HASS_CONFIG) conf = hass.data.get(DATA_CONFIG) if entry.source == config_entries.SOURCE_IMPORT: if conf is None: # user removed config from configuration.yaml, abort setup hass.async_create_task( hass.config_entries.async_remove(entry.entry_id) ) return False if conf != entry.data: # user changed config from configuration.yaml, use conf to setup hass.config_entries.async_update_entry(entry, data=conf) if conf is None: conf = CONFIG_SCHEMA({DOMAIN: entry.data})[DOMAIN] # validate credentials and create sessions validation = True tasks = [] for cred in conf[ATTR_CREDENTIALS]: tasks.append(_validate_aws_credentials(hass, cred)) if tasks: results = await asyncio.gather(*tasks, return_exceptions=True) for index, result in enumerate(results): name = conf[ATTR_CREDENTIALS][index][CONF_NAME] if isinstance(result, Exception): _LOGGER.error( "Validating credential [%s] failed: %s", name, result, exc_info=result, ) validation = False else: hass.data[DATA_SESSIONS][name] = result # set up notify platform, no entry support for notify component yet, # have to use discovery to load platform. for notify_config in conf[CONF_NOTIFY]: hass.async_create_task( discovery.async_load_platform( hass, "notify", DOMAIN, notify_config, config ) ) return validation
async def update_devices(login_obj): """Ping Alexa API to identify all devices, bluetooth, and last called device. This will add new devices and services when discovered. By default this runs every SCAN_INTERVAL seconds unless another method calls it. if websockets is connected, it will return immediately unless 'new_devices' has been set to True. While throttled at MIN_TIME_BETWEEN_SCANS, care should be taken to reduce the number of runs to avoid flooding. Slow changing states should be checked here instead of in spawned components like media_player since this object is one per account. Each AlexaAPI call generally results in two webpage requests. """ from alexapy import AlexaAPI email: Text = login_obj.email if email not in hass.data[DATA_ALEXAMEDIA]["accounts"]: return existing_serials = _existing_serials() existing_entities = hass.data[DATA_ALEXAMEDIA]["accounts"][email][ "entities"]["media_player"].values() if ("websocket" in hass.data[DATA_ALEXAMEDIA]["accounts"][email] and hass.data[DATA_ALEXAMEDIA]["accounts"][email]["websocket"] and not (hass.data[DATA_ALEXAMEDIA]["accounts"][email] ["new_devices"])): return hass.data[DATA_ALEXAMEDIA]["accounts"][email]["new_devices"] = False try: auth_info = await AlexaAPI.get_authentication(login_obj) devices = await AlexaAPI.get_devices(login_obj) bluetooth = await AlexaAPI.get_bluetooth(login_obj) preferences = await AlexaAPI.get_device_preferences(login_obj) dnd = await AlexaAPI.get_dnd_state(login_obj) raw_notifications = await AlexaAPI.get_notifications(login_obj) _LOGGER.debug( "%s: Found %s devices, %s bluetooth", hide_email(email), len(devices) if devices is not None else "", len(bluetooth) if bluetooth is not None else "", ) if (devices is None or bluetooth is None) and not (hass.data[ DATA_ALEXAMEDIA]["accounts"][email]["configurator"]): raise AlexapyLoginError() except (AlexapyLoginError, RuntimeError): _LOGGER.debug("%s: Alexa API disconnected; attempting to relogin", hide_email(email)) await login_obj.login_with_cookie() await test_login_status(hass, config_entry, login_obj, setup_platform_callback) return new_alexa_clients = [] # list of newly discovered device names exclude_filter = [] include_filter = [] for device in devices: if include and device["accountName"] not in include: include_filter.append(device["accountName"]) if "appDeviceList" in device: for app in device["appDeviceList"]: (hass.data[DATA_ALEXAMEDIA]["accounts"][email] ["excluded"][app["serialNumber"]]) = device (hass.data[DATA_ALEXAMEDIA]["accounts"][email]["excluded"][ device["serialNumber"]]) = device continue elif exclude and device["accountName"] in exclude: exclude_filter.append(device["accountName"]) if "appDeviceList" in device: for app in device["appDeviceList"]: (hass.data[DATA_ALEXAMEDIA]["accounts"][email] ["excluded"][app["serialNumber"]]) = device (hass.data[DATA_ALEXAMEDIA]["accounts"][email]["excluded"][ device["serialNumber"]]) = device continue if "bluetoothStates" in bluetooth: for b_state in bluetooth["bluetoothStates"]: if device["serialNumber"] == b_state["deviceSerialNumber"]: device["bluetooth_state"] = b_state if "devicePreferences" in preferences: for dev in preferences["devicePreferences"]: if dev["deviceSerialNumber"] == device["serialNumber"]: device["locale"] = dev["locale"] device["timeZoneId"] = dev["timeZoneId"] _LOGGER.debug( "Locale %s timezone %s found for %s", device["locale"], device["timeZoneId"], hide_serial(device["serialNumber"]), ) if "doNotDisturbDeviceStatusList" in dnd: for dev in dnd["doNotDisturbDeviceStatusList"]: if 
dev["deviceSerialNumber"] == device["serialNumber"]: device["dnd"] = dev["enabled"] _LOGGER.debug( "DND %s found for %s", device["dnd"], hide_serial(device["serialNumber"]), ) device["auth_info"] = auth_info (hass.data[DATA_ALEXAMEDIA]["accounts"][email]["devices"] ["media_player"][device["serialNumber"]]) = device if device["serialNumber"] not in existing_serials: new_alexa_clients.append(device["accountName"]) _LOGGER.debug( "%s: Existing: %s New: %s;" " Filtered out by not being in include: %s " "or in exclude: %s", hide_email(email), list(existing_entities), new_alexa_clients, include_filter, exclude_filter, ) if new_alexa_clients: cleaned_config = config.copy() cleaned_config.pop(CONF_PASSWORD, None) # CONF_PASSWORD contains sensitive info which is no longer needed for component in ALEXA_COMPONENTS: if component == "notify": hass.async_create_task( async_load_platform( hass, component, DOMAIN, { CONF_NAME: DOMAIN, "config": cleaned_config }, config, )) else: hass.async_add_job( hass.config_entries.async_forward_entry_setup( config_entry, component)) await process_notifications(login_obj, raw_notifications) # Process last_called data to fire events await update_last_called(login_obj) async_call_later( hass, scan_interval, lambda _: hass.async_create_task( update_devices(login_obj, no_throttle=True)), )
def async_setup(hass, config): """Set up for Envisalink devices.""" from pyenvisalink import EnvisalinkAlarmPanel conf = config.get(DOMAIN) host = conf.get(CONF_EVL_HOST) port = conf.get(CONF_EVL_PORT) code = conf.get(CONF_CODE) panel_type = conf.get(CONF_PANEL_TYPE) panic_type = conf.get(CONF_PANIC) version = conf.get(CONF_EVL_VERSION) user = conf.get(CONF_USERNAME) password = conf.get(CONF_PASS) keep_alive = conf.get(CONF_EVL_KEEPALIVE) zone_dump = conf.get(CONF_ZONEDUMP_INTERVAL) zones = conf.get(CONF_ZONES) partitions = conf.get(CONF_PARTITIONS) sync_connect = asyncio.Future(loop=hass.loop) controller = EnvisalinkAlarmPanel( host, port, panel_type, version, user, password, zone_dump, keep_alive, hass.loop) hass.data[DATA_EVL] = controller @callback def login_fail_callback(data): """Handle when the evl rejects our login.""" _LOGGER.error("The Envisalink rejected your credentials") sync_connect.set_result(False) @callback def connection_fail_callback(data): """Network failure callback.""" _LOGGER.error("Could not establish a connection with the Envisalink") sync_connect.set_result(False) @callback def connection_success_callback(data): """Handle a successful connection.""" _LOGGER.info("Established a connection with the Envisalink") hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_envisalink) sync_connect.set_result(True) @callback def zones_updated_callback(data): """Handle zone timer updates.""" _LOGGER.info("Envisalink sent a zone update event. Updating zones...") async_dispatcher_send(hass, SIGNAL_ZONE_UPDATE, data) @callback def alarm_data_updated_callback(data): """Handle non-alarm based info updates.""" _LOGGER.info("Envisalink sent new alarm info. Updating alarms...") async_dispatcher_send(hass, SIGNAL_KEYPAD_UPDATE, data) @callback def partition_updated_callback(data): """Handle partition changes thrown by evl (including alarms).""" _LOGGER.info("The envisalink sent a partition update event") async_dispatcher_send(hass, SIGNAL_PARTITION_UPDATE, data) @callback def stop_envisalink(event): """Shutdown envisalink connection and thread on exit.""" _LOGGER.info("Shutting down Envisalink") controller.stop() controller.callback_zone_timer_dump = zones_updated_callback controller.callback_zone_state_change = zones_updated_callback controller.callback_partition_state_change = partition_updated_callback controller.callback_keypad_update = alarm_data_updated_callback controller.callback_login_failure = login_fail_callback controller.callback_login_timeout = connection_fail_callback controller.callback_login_success = connection_success_callback _LOGGER.info("Start envisalink.") controller.start() result = yield from sync_connect if not result: return False # Load sub-components for Envisalink if partitions: hass.async_add_job(async_load_platform( hass, 'alarm_control_panel', 'envisalink', { CONF_PARTITIONS: partitions, CONF_CODE: code, CONF_PANIC: panic_type }, config )) hass.async_add_job(async_load_platform( hass, 'sensor', 'envisalink', { CONF_PARTITIONS: partitions, CONF_CODE: code }, config )) if zones: hass.async_add_job(async_load_platform( hass, 'binary_sensor', 'envisalink', { CONF_ZONES: zones }, config )) return True
def _load_platform(hass, config, entity_type, entity_info_list):
    """Load platform with list of entity info."""
    hass.async_create_task(
        async_load_platform(hass, entity_type, DOMAIN,
                            entity_info_list, config))
async def async_modbus_setup( hass, config, service_write_register_schema, service_write_coil_schema ): """Set up Modbus component.""" hass.data[DOMAIN] = hub_collect = {} for conf_hub in config[DOMAIN]: my_hub = ModbusHub(hass, conf_hub) hub_collect[conf_hub[CONF_NAME]] = my_hub # modbus needs to be activated before components are loaded # to avoid a racing problem if not await my_hub.async_setup(): return False # load platforms for component, conf_key in PLATFORMS: if conf_key in conf_hub: hass.async_create_task( async_load_platform(hass, component, DOMAIN, conf_hub, config) ) async def async_stop_modbus(event): """Stop Modbus service.""" for client in hub_collect.values(): await client.async_close() del client hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_stop_modbus) async def async_write_register(service): """Write Modbus registers.""" unit = int(float(service.data[ATTR_UNIT])) address = int(float(service.data[ATTR_ADDRESS])) value = service.data[ATTR_VALUE] client_name = ( service.data[ATTR_HUB] if ATTR_HUB in service.data else DEFAULT_HUB ) if isinstance(value, list): await hub_collect[client_name].async_pymodbus_call( unit, address, [int(float(i)) for i in value], CALL_TYPE_WRITE_REGISTERS ) else: await hub_collect[client_name].async_pymodbus_call( unit, address, int(float(value)), CALL_TYPE_WRITE_REGISTER ) hass.services.async_register( DOMAIN, SERVICE_WRITE_REGISTER, async_write_register, schema=service_write_register_schema, ) async def async_write_coil(service): """Write Modbus coil.""" unit = service.data[ATTR_UNIT] address = service.data[ATTR_ADDRESS] state = service.data[ATTR_STATE] client_name = ( service.data[ATTR_HUB] if ATTR_HUB in service.data else DEFAULT_HUB ) if isinstance(state, list): await hub_collect[client_name].async_pymodbus_call( unit, address, state, CALL_TYPE_WRITE_COILS ) else: await hub_collect[client_name].async_pymodbus_call( unit, address, state, CALL_TYPE_WRITE_COIL ) hass.services.async_register( DOMAIN, SERVICE_WRITE_COIL, async_write_coil, schema=service_write_coil_schema ) return True
async def async_setup(hass, config): """Set up the Eight Sleep component.""" conf = config.get(DOMAIN) user = conf.get(CONF_USERNAME) password = conf.get(CONF_PASSWORD) partner = conf.get(CONF_PARTNER) if hass.config.time_zone is None: _LOGGER.error("Timezone is not set in Home Assistant.") return False timezone = hass.config.time_zone eight = EightSleep(user, password, timezone, partner, None, hass.loop) hass.data[DATA_EIGHT] = eight # Authenticate, build sensors success = await eight.start() if not success: # Authentication failed, cannot continue return False async def async_update_heat_data(now): """Update heat data from eight in HEAT_SCAN_INTERVAL.""" await eight.update_device_data() async_dispatcher_send(hass, SIGNAL_UPDATE_HEAT) async_track_point_in_utc_time(hass, async_update_heat_data, utcnow() + HEAT_SCAN_INTERVAL) async def async_update_user_data(now): """Update user data from eight in USER_SCAN_INTERVAL.""" await eight.update_user_data() async_dispatcher_send(hass, SIGNAL_UPDATE_USER) async_track_point_in_utc_time(hass, async_update_user_data, utcnow() + USER_SCAN_INTERVAL) await async_update_heat_data(None) await async_update_user_data(None) # Load sub components sensors = [] binary_sensors = [] if eight.users: for user in eight.users: obj = eight.users[user] for sensor in SENSORS: sensors.append(f"{obj.side}_{sensor}") binary_sensors.append(f"{obj.side}_presence") sensors.append("room_temp") else: # No users, cannot continue return False hass.async_create_task( discovery.async_load_platform(hass, "sensor", DOMAIN, {CONF_SENSORS: sensors}, config)) hass.async_create_task( discovery.async_load_platform(hass, "binary_sensor", DOMAIN, {CONF_BINARY_SENSORS: binary_sensors}, config)) async def async_service_handler(service): """Handle eight sleep service calls.""" params = service.data.copy() sensor = params.pop(ATTR_ENTITY_ID, None) target = params.pop(ATTR_TARGET_HEAT, None) duration = params.pop(ATTR_HEAT_DURATION, 0) for sens in sensor: side = sens.split("_")[1] userid = eight.fetch_userid(side) usrobj = eight.users[userid] await usrobj.set_heating_level(target, duration) async_dispatcher_send(hass, SIGNAL_UPDATE_HEAT) # Register services hass.services.async_register(DOMAIN, SERVICE_HEAT_SET, async_service_handler, schema=SERVICE_EIGHT_SCHEMA) async def stop_eight(event): """Handle stopping eight api session.""" await eight.stop() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_eight) return True
def _discover_mysensors_platform(hass, hass_config, platform, new_devices):
    """Discover a MySensors platform."""
    task = hass.async_create_task(discovery.async_load_platform(
        hass, platform, DOMAIN,
        {ATTR_DEVICES: new_devices, CONF_NAME: DOMAIN}, hass_config))
    return task
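# Context note (added, not from the source): hass.async_create_task returns an
# asyncio.Task, so a helper like the one above can hand the task back to its
# caller, which may then await it to ensure the platform has finished loading
# before continuing. A hypothetical caller might look like this:
#
#     async def on_gateway_ready(hass, hass_config, new_devices):
#         task = _discover_mysensors_platform(
#             hass, hass_config, 'sensor', new_devices)
#         await task  # block until the sensor platform is set up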
async def async_setup(hass, config): """setup loxone""" try: lox_config = loxApp() lox_config.lox_user = config[DOMAIN][CONF_USERNAME] lox_config.lox_pass = config[DOMAIN][CONF_PASSWORD] lox_config.host = config[DOMAIN][CONF_HOST] lox_config.port = config[DOMAIN][CONF_PORT] request_code = await lox_config.getJson() if request_code == 200 or request_code == "200": hass.data[DOMAIN] = config[DOMAIN] hass.data[DOMAIN]['loxconfig'] = lox_config.json for platform in LOXONE_PLATFORMS: _LOGGER.debug("starting loxone {}...".format(platform)) hass.async_create_task( async_load_platform(hass, platform, DOMAIN, {}, config)) del lox_config else: _LOGGER.error("unable to connect to Loxone") except ConnectionError: _LOGGER.error("unable to connect to Loxone") return False lox = LoxWs(user=config[DOMAIN][CONF_USERNAME], password=config[DOMAIN][CONF_PASSWORD], host=config[DOMAIN][CONF_HOST], port=config[DOMAIN][CONF_PORT], loxconfig=config[DOMAIN]['loxconfig']) async def message_callback(message): hass.bus.async_fire(EVENT, message) async def start_loxone(event): await lox.start() async def stop_loxone(event): _ = await lox.stop() _LOGGER.debug(_) async def loxone_discovered(event): if "component" in event.data: if event.data['component'] == DOMAIN: try: _LOGGER.info("loxone discovered") await asyncio.sleep(0.1) # await asyncio.sleep(0) entity_ids = hass.states.async_all() sensors_analog = [] sensors_digital = [] switches = [] covers = [] lights = [] for s in entity_ids: s_dict = s.as_dict() attr = s_dict['attributes'] if "plattform" in attr and \ attr['plattform'] == DOMAIN: if attr['device_typ'] == "analog_sensor": sensors_analog.append(s_dict['entity_id']) elif attr['device_typ'] == "digital_sensor": sensors_digital.append(s_dict['entity_id']) elif attr['device_typ'] == "Jalousie" or \ attr['device_typ'] == "Gate": covers.append(s_dict['entity_id']) elif attr['device_typ'] == "Switch" or \ attr['device_typ'] == "Pushbutton" or \ attr['device_typ'] == "TimedSwitch": switches.append(s_dict['entity_id']) elif attr['device_typ'] == "LightControllerV2" or \ attr['device_typ'] == "Dimmer": lights.append(s_dict['entity_id']) sensors_analog.sort() sensors_digital.sort() covers.sort() switches.sort() lights.sort() async def create_loxone_group(object_id, name, entity_names, visible=True, view=False): if visible: visiblity = "true" else: visiblity = "false" if view: view_state = "true" else: view_state = "false" command = { "object_id": object_id, "entities": entity_names, "name": name } await hass.services.async_call("group", "set", command) await create_loxone_group("loxone_analog", "Loxone Analog Sensors", sensors_analog, True, False) await create_loxone_group("loxone_digital", "Loxone Digital Sensors", sensors_digital, True, False) await create_loxone_group("loxone_switches", "Loxone Switches", switches, True, False) await create_loxone_group("loxone_covers", "Loxone Covers", covers, True, False) await create_loxone_group("loxone_lights", "Loxone Lights", lights, True, False) await create_loxone_group("loxone_group", "Loxone Group", [ "group.loxone_analog", "group.loxone_digital", "group.loxone_switches", "group.loxone_covers", "group.loxone_lights", "group.loxone_dimmers" ], True, True) except: traceback.print_exc() res = False try: res = await lox.async_init() except ConnectionError: _LOGGER.error("Connection Error") if res is True: lox.message_call_back = message_callback hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_loxone) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_loxone) 
hass.bus.async_listen_once(EVENT_COMPONENT_LOADED, loxone_discovered) async def listen_loxone_send(event): """Listen for change Events from Loxone Components""" try: if event.event_type == SENDDOMAIN and isinstance( event.data, dict): value = event.data.get(ATTR_VALUE, DEFAULT) device_uuid = event.data.get(ATTR_UUID, DEFAULT) await lox.send_websocket_command(device_uuid, value) elif event.event_type == SECUREDSENDDOMAIN and isinstance( event.data, dict): value = event.data.get(ATTR_VALUE, DEFAULT) device_uuid = event.data.get(ATTR_UUID, DEFAULT) code = event.data.get(ATTR_CODE, DEFAULT) await lox.send_secured__websocket_command( device_uuid, value, code) except ValueError: traceback.print_exc() hass.bus.async_listen(SENDDOMAIN, listen_loxone_send) hass.bus.async_listen(SECUREDSENDDOMAIN, listen_loxone_send) async def handle_websocket_command(call): """Handle websocket command services.""" value = call.data.get(ATTR_VALUE, DEFAULT) device_uuid = call.data.get(ATTR_UUID, DEFAULT) await lox.send_websocket_command(device_uuid, value) hass.services.async_register(DOMAIN, 'event_websocket_command', handle_websocket_command) else: res = False _LOGGER.info("Error") return res
async def async_setup(hass, config): """Set up an Utility Meter.""" component = EntityComponent(_LOGGER, DOMAIN, hass) hass.data[DATA_UTILITY] = {} register_services = False for meter, conf in config.get(DOMAIN).items(): _LOGGER.debug("Setup %s.%s", DOMAIN, meter) hass.data[DATA_UTILITY][meter] = conf hass.data[DATA_UTILITY][meter][DATA_TARIFF_SENSORS] = [] if not conf[CONF_TARIFFS]: # only one entity is required hass.async_create_task( discovery.async_load_platform( hass, SENSOR_DOMAIN, DOMAIN, [{CONF_METER: meter, CONF_NAME: conf.get(CONF_NAME, meter)}], config, ) ) else: # create tariff selection await component.async_add_entities( [TariffSelect(meter, list(conf[CONF_TARIFFS]))] ) hass.data[DATA_UTILITY][meter][CONF_TARIFF_ENTITY] = "{}.{}".format( DOMAIN, meter ) # add one meter for each tariff tariff_confs = [] for tariff in conf[CONF_TARIFFS]: tariff_confs.append( { CONF_METER: meter, CONF_NAME: f"{meter} {tariff}", CONF_TARIFF: tariff, } ) hass.async_create_task( discovery.async_load_platform( hass, SENSOR_DOMAIN, DOMAIN, tariff_confs, config ) ) register_services = True if register_services: component.async_register_entity_service(SERVICE_RESET, {}, "async_reset_meters") component.async_register_entity_service( SERVICE_SELECT_TARIFF, {vol.Required(ATTR_TARIFF): cv.string}, "async_select_tariff", ) component.async_register_entity_service( SERVICE_SELECT_NEXT_TARIFF, {}, "async_next_tariff" ) return True
async def async_modbus_setup( hass: HomeAssistant, config: ConfigType, ) -> bool: """Set up Modbus component.""" hass.data[DOMAIN] = hub_collect = {} for conf_hub in config[DOMAIN]: my_hub = ModbusHub(hass, conf_hub) hub_collect[conf_hub[CONF_NAME]] = my_hub # modbus needs to be activated before components are loaded # to avoid a racing problem if not await my_hub.async_setup(): return False # load platforms for component, conf_key in PLATFORMS: if conf_key in conf_hub: hass.async_create_task( async_load_platform(hass, component, DOMAIN, conf_hub, config)) async def async_stop_modbus(event: Event) -> None: """Stop Modbus service.""" async_dispatcher_send(hass, SIGNAL_STOP_ENTITY) for client in hub_collect.values(): await client.async_close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_stop_modbus) async def async_write_register(service: ServiceCall) -> None: """Write Modbus registers.""" unit = int(float(service.data[ATTR_UNIT])) address = int(float(service.data[ATTR_ADDRESS])) value = service.data[ATTR_VALUE] hub = hub_collect[service.data[ATTR_HUB] if ATTR_HUB in service.data else DEFAULT_HUB] if isinstance(value, list): await hub.async_pymodbus_call(unit, address, [int(float(i)) for i in value], CALL_TYPE_WRITE_REGISTERS) else: await hub.async_pymodbus_call(unit, address, int(float(value)), CALL_TYPE_WRITE_REGISTER) async def async_write_coil(service: ServiceCall) -> None: """Write Modbus coil.""" unit = service.data[ATTR_UNIT] address = service.data[ATTR_ADDRESS] state = service.data[ATTR_STATE] hub = hub_collect[service.data[ATTR_HUB] if ATTR_HUB in service.data else DEFAULT_HUB] if isinstance(state, list): await hub.async_pymodbus_call(unit, address, state, CALL_TYPE_WRITE_COILS) else: await hub.async_pymodbus_call(unit, address, state, CALL_TYPE_WRITE_COIL) for x_write in ( (SERVICE_WRITE_REGISTER, async_write_register, ATTR_VALUE, cv.positive_int), (SERVICE_WRITE_COIL, async_write_coil, ATTR_STATE, cv.boolean), ): hass.services.async_register( DOMAIN, x_write[0], x_write[1], schema=vol.Schema({ vol.Optional(ATTR_HUB, default=DEFAULT_HUB): cv.string, vol.Required(ATTR_UNIT): cv.positive_int, vol.Required(ATTR_ADDRESS): cv.positive_int, vol.Required(x_write[2]): vol.Any(cv.positive_int, vol.All(cv.ensure_list, [x_write[3]])), }), ) async def async_stop_hub(service: ServiceCall) -> None: """Stop Modbus hub.""" async_dispatcher_send(hass, SIGNAL_STOP_ENTITY) hub = hub_collect[service.data[ATTR_HUB]] await hub.async_close() async def async_restart_hub(service: ServiceCall) -> None: """Restart Modbus hub.""" async_dispatcher_send(hass, SIGNAL_START_ENTITY) hub = hub_collect[service.data[ATTR_HUB]] await hub.async_restart() for x_service in ( (SERVICE_STOP, async_stop_hub), (SERVICE_RESTART, async_restart_hub), ): hass.services.async_register( DOMAIN, x_service[0], x_service[1], schema=vol.Schema({vol.Required(ATTR_HUB): cv.string}), ) return True
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the zodiac component."""
    hass.async_create_task(
        async_load_platform(hass, "sensor", DOMAIN, {}, config))

    return True
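# Illustrative sketch (added; ExampleSensor is an assumed name, not from the
# source): every async_load_platform call in the snippets above ends up in the
# target platform module's async_setup_platform, with the fourth argument
# delivered as discovery_info. A minimal receiving platform could look like:
async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Set up a platform from a discovery call."""
    if discovery_info is None:
        # Platform was configured directly rather than discovered.
        return
    async_add_entities([ExampleSensor(discovery_info)], True)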
def async_device_initialized(self, device, join): """Handle device joined and basic information discovered (async).""" import zigpy.profiles import homeassistant.components.zha.const as zha_const zha_const.populate_data() for endpoint_id, endpoint in device.endpoints.items(): if endpoint_id == 0: # ZDO continue discovered_info = yield from _discover_endpoint_info(endpoint) component = None profile_clusters = ([], []) device_key = "{}-{}".format(device.ieee, endpoint_id) node_config = self._config[DOMAIN][CONF_DEVICE_CONFIG].get( device_key, {}) if endpoint.profile_id in zigpy.profiles.PROFILES: profile = zigpy.profiles.PROFILES[endpoint.profile_id] if zha_const.DEVICE_CLASS.get(endpoint.profile_id, {}).get(endpoint.device_type, None): profile_clusters = profile.CLUSTERS[endpoint.device_type] profile_info = zha_const.DEVICE_CLASS[endpoint.profile_id] component = profile_info[endpoint.device_type] if ha_const.CONF_TYPE in node_config: component = node_config[ha_const.CONF_TYPE] profile_clusters = zha_const.COMPONENT_CLUSTERS[component] if component: in_clusters = [endpoint.in_clusters[c] for c in profile_clusters[0] if c in endpoint.in_clusters] out_clusters = [endpoint.out_clusters[c] for c in profile_clusters[1] if c in endpoint.out_clusters] discovery_info = { 'application_listener': self, 'endpoint': endpoint, 'in_clusters': {c.cluster_id: c for c in in_clusters}, 'out_clusters': {c.cluster_id: c for c in out_clusters}, 'new_join': join, 'unique_id': device_key, } discovery_info.update(discovered_info) self._hass.data[DISCOVERY_KEY][device_key] = discovery_info yield from discovery.async_load_platform( self._hass, component, DOMAIN, {'discovery_key': device_key}, self._config, ) for cluster_id, cluster in endpoint.in_clusters.items(): cluster_type = type(cluster) if cluster_id in profile_clusters[0]: continue if cluster_type not in zha_const.SINGLE_CLUSTER_DEVICE_CLASS: continue component = zha_const.SINGLE_CLUSTER_DEVICE_CLASS[cluster_type] cluster_key = "{}-{}".format(device_key, cluster_id) discovery_info = { 'application_listener': self, 'endpoint': endpoint, 'in_clusters': {cluster.cluster_id: cluster}, 'out_clusters': {}, 'new_join': join, 'unique_id': cluster_key, 'entity_suffix': '_{}'.format(cluster_id), } discovery_info.update(discovered_info) self._hass.data[DISCOVERY_KEY][cluster_key] = discovery_info yield from discovery.async_load_platform( self._hass, component, DOMAIN, {'discovery_key': cluster_key}, self._config, )
async def async_setup(hass, config): """Set up the GreenEye Monitor component.""" monitors = Monitors() hass.data[DATA_GREENEYE_MONITOR] = monitors server_config = config[DOMAIN] server = await monitors.start_server(server_config[CONF_PORT]) async def close_server(*args): """Close the monitoring server.""" await server.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_server) all_sensors = [] for monitor_config in server_config[CONF_MONITORS]: monitor_serial_number = { CONF_MONITOR_SERIAL_NUMBER: monitor_config[CONF_SERIAL_NUMBER] } channel_configs = monitor_config[CONF_CHANNELS] for channel_config in channel_configs: all_sensors.append({ CONF_SENSOR_TYPE: SENSOR_TYPE_CURRENT, **monitor_serial_number, **channel_config, }) voltage_configs = monitor_config[CONF_VOLTAGE_SENSORS] for voltage_config in voltage_configs: all_sensors.append({ CONF_SENSOR_TYPE: SENSOR_TYPE_VOLTAGE, **monitor_serial_number, **voltage_config, }) sensor_configs = monitor_config[CONF_TEMPERATURE_SENSORS] if sensor_configs: temperature_unit = { CONF_TEMPERATURE_UNIT: sensor_configs[CONF_TEMPERATURE_UNIT] } for sensor_config in sensor_configs[CONF_SENSORS]: all_sensors.append({ CONF_SENSOR_TYPE: SENSOR_TYPE_TEMPERATURE, **monitor_serial_number, **temperature_unit, **sensor_config, }) counter_configs = monitor_config[CONF_PULSE_COUNTERS] for counter_config in counter_configs: all_sensors.append({ CONF_SENSOR_TYPE: SENSOR_TYPE_PULSE_COUNTER, **monitor_serial_number, **counter_config, }) if not all_sensors: _LOGGER.error("Configuration must specify at least one " "channel, voltage, pulse counter or temperature sensor") return False hass.async_create_task( async_load_platform(hass, "sensor", DOMAIN, all_sensors, config)) return True
async def async_setup(hass: HomeAssistantType, hass_config: dict): session = async_get_clientsession(hass) hass.data[DOMAIN] = registry = EWeLinkRegistry(session) config = hass_config[DOMAIN] # init debug if needed if config[CONF_DEBUG]: debug = utils.SonoffDebug(hass) _LOGGER.setLevel(logging.DEBUG) _LOGGER.addHandler(debug) info = await hass.helpers.system_info.async_get_system_info() info.pop('installation_type', None) # fix HA v0.109.6 info.pop('timezone') _LOGGER.debug(f"SysInfo: {info}") # main init phase mode = config[CONF_MODE] _LOGGER.debug(f"{mode.upper()} mode start") cachefile = hass.config.path('.sonoff.json') registry.cache_load_devices(cachefile) has_credentials = CONF_USERNAME in config and CONF_PASSWORD in config # in mode=local with reload=once - do not connect to cloud servers local_once = (mode == 'local' and config[CONF_RELOAD] == 'once' and registry.devices) if has_credentials and not local_once: if await registry.cloud_login(config[CONF_USERNAME], config[CONF_PASSWORD]): await registry.cloud_load_devices(cachefile) else: _LOGGER.warning("Can't connect to eWeLink Cloud") # don't start the cloud below has_credentials = False elif mode == 'cloud': _LOGGER.error("For cloud mode login / password required") return False confdevices = config.get(CONF_DEVICES) if confdevices: registry.concat_devices(confdevices) default_class = config[CONF_DEFAULT_CLASS] utils.init_device_class(default_class) # List of attributes that invoke force_update if CONF_FORCE_UPDATE in config: force_update = set(config[CONF_FORCE_UPDATE]) _LOGGER.debug(f"Init force_update for attributes: {force_update}") else: force_update = None if CONF_SENSORS in config: auto_sensors = config[CONF_SENSORS] _LOGGER.debug(f"Init auto sensors for: {auto_sensors}") else: auto_sensors = [] def add_device(deviceid: str, state: dict, *args): device = registry.devices[deviceid] # device with handlers already added if 'handlers' in device: return else: device['handlers'] = [] device_class = device.get(CONF_DEVICE_CLASS) # ignore device if user wants if device_class == CONF_EXCLUDE: return # TODO: right place? 
device['available'] = device.get('online') or device.get('host') # collect info for logs device['extra'] = utils.get_device_info(device) # TODO: fix remove camera info from logs state.pop('partnerDevice', None) info = { 'uiid': device['uiid'], 'extra': device['extra'], 'params': state } _LOGGER.debug(f"{deviceid} == Init | {info}") # fix cloud attrs like currentTemperature and currentHumidity get_attrs(state) # set device force_update if needed if force_update and force_update & state.keys(): device[CONF_FORCE_UPDATE] = True if not device_class: device_class = utils.guess_device_class(device) if not device_class: # Fallback guess device_class from device state if 'switch' in state: device_class = default_class elif 'switches' in state: device_class = [default_class] * 4 else: device_class = 'binary_sensor' if isinstance(device_class, str): # read single device_class if device_class in BINARY_DEVICE: device_class = 'binary_sensor' info = {'deviceid': deviceid, 'channels': None} hass.async_create_task( discovery.async_load_platform(hass, device_class, DOMAIN, info, hass_config)) else: # read multichannel device_class for info in utils.parse_multichannel_class(device_class): info['deviceid'] = deviceid hass.async_create_task( discovery.async_load_platform(hass, info.pop('component'), DOMAIN, info, hass_config)) for attribute in auto_sensors: if attribute in state: info = {'deviceid': deviceid, 'attribute': attribute} hass.async_create_task( discovery.async_load_platform(hass, 'sensor', DOMAIN, info, hass_config)) async def send_command(call: ServiceCall): """Service for send raw command to device. :param call: `device` - required param, all other params - optional """ data = dict(call.data) deviceid = str(data.pop('device')) if len(deviceid) == 10: await registry.send(deviceid, data) elif len(deviceid) == 6: await cameras.send(deviceid, data['cmd']) else: _LOGGER.error(f"Wrong deviceid {deviceid}") hass.services.async_register(DOMAIN, 'send_command', send_command) async def update_consumption(call: ServiceCall): if not hasattr(registry, 'consumption'): _LOGGER.debug("Create ConsumptionHelper") registry.consumption = ConsumptionHelper(registry.cloud) await registry.consumption.update() hass.services.async_register(DOMAIN, 'update_consumption', update_consumption) if CONF_SCAN_INTERVAL in config: global SCAN_INTERVAL SCAN_INTERVAL = config[CONF_SCAN_INTERVAL] if mode in ('auto', 'cloud') and has_credentials: utils.handle_cloud_error(hass) # immediately add all cloud devices for deviceid, device in registry.devices.items(): if 'params' not in device: continue conn = 'online' if device['online'] else 'offline' device['params']['cloud'] = conn add_device(deviceid, device['params'], None) await registry.cloud_start() if mode in ('auto', 'local'): # add devices only on first discovery await registry.local_start([add_device]) # cameras starts only on first command to it cameras = EWeLinkCameras() # create binary sensors for RF Bridge if CONF_RFBRIDGE in config: for k, v in config[CONF_RFBRIDGE].items(): v['trigger'] = k hass.async_create_task( discovery.async_load_platform(hass, 'binary_sensor', DOMAIN, v, hass_config)) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, registry.stop) return True
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Eight Sleep component.""" if DOMAIN not in config: return True conf = config[DOMAIN] user = conf[CONF_USERNAME] password = conf[CONF_PASSWORD] eight = EightSleep(user, password, hass.config.time_zone, async_get_clientsession(hass)) hass.data.setdefault(DOMAIN, {}) # Authenticate, build sensors success = await eight.start() if not success: # Authentication failed, cannot continue return False heat_coordinator: DataUpdateCoordinator = DataUpdateCoordinator( hass, _LOGGER, name=f"{DOMAIN}_heat", update_interval=HEAT_SCAN_INTERVAL, update_method=eight.update_device_data, ) user_coordinator: DataUpdateCoordinator = DataUpdateCoordinator( hass, _LOGGER, name=f"{DOMAIN}_user", update_interval=USER_SCAN_INTERVAL, update_method=eight.update_user_data, ) await heat_coordinator.async_config_entry_first_refresh() await user_coordinator.async_config_entry_first_refresh() if not eight.users: # No users, cannot continue return False hass.data[DOMAIN] = { DATA_API: eight, DATA_HEAT: heat_coordinator, DATA_USER: user_coordinator, } for platform in PLATFORMS: hass.async_create_task( discovery.async_load_platform(hass, platform, DOMAIN, {}, config)) async def async_service_handler(service: ServiceCall) -> None: """Handle eight sleep service calls.""" params = service.data.copy() sensor = params.pop(ATTR_ENTITY_ID, None) target = params.pop(ATTR_TARGET_HEAT, None) duration = params.pop(ATTR_HEAT_DURATION, 0) for sens in sensor: side = sens.split("_")[1] userid = eight.fetch_userid(side) usrobj = eight.users[userid] await usrobj.set_heating_level(target, duration) await heat_coordinator.async_request_refresh() # Register services hass.services.async_register(DOMAIN, SERVICE_HEAT_SET, async_service_handler, schema=SERVICE_EIGHT_SCHEMA) return True
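# Illustrative sketch only: how a platform entity could consume the shared DataUpdateCoordinator
# objects stored in hass.data[DOMAIN] above. The class, attribute names and the value returned
# from the pyeight user object are assumptions, not the integration's actual code.
from homeassistant.components.sensor import SensorEntity
from homeassistant.helpers.update_coordinator import CoordinatorEntity


class ExampleEightHeatSensor(CoordinatorEntity, SensorEntity):
    """Refresh automatically whenever the heat coordinator polls the API."""

    def __init__(self, coordinator, eight, user_id):
        super().__init__(coordinator)
        self._eight = eight      # shared EightSleep API object
        self._user_id = user_id

    @property
    def native_value(self):
        # The coordinator's update_method (eight.update_device_data) refreshes the shared
        # API object; read the freshly cached value from it here (attribute is assumed).
        return self._eight.users[self._user_id].heating_level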
async def async_setup(hass, config): """Set up the KNX component.""" try: hass.data[DOMAIN] = KNXModule(hass, config) hass.data[DOMAIN].async_create_exposures() await hass.data[DOMAIN].start() except XKNXException as ex: _LOGGER.warning("Could not connect to KNX interface: %s", ex) hass.components.persistent_notification.async_create( f"Could not connect to KNX interface: <br><b>{ex}</b>", title="KNX") for platform in SupportedPlatforms: if platform.value in config[DOMAIN]: for device_config in config[DOMAIN][platform.value]: create_knx_device(platform, hass.data[DOMAIN].xknx, device_config) # We need to wait until all entities are loaded into the device list since they could also be created from other platforms for platform in SupportedPlatforms: hass.async_create_task( discovery.async_load_platform(hass, platform.value, DOMAIN, {}, config)) if not hass.data[DOMAIN].xknx.devices: _LOGGER.warning( "No KNX devices are configured. Please read " "https://www.home-assistant.io/blog/2020/09/17/release-115/#breaking-changes" ) hass.services.async_register( DOMAIN, SERVICE_KNX_SEND, hass.data[DOMAIN].service_send_to_knx_bus, schema=SERVICE_KNX_SEND_SCHEMA, ) async_register_admin_service( hass, DOMAIN, SERVICE_KNX_EVENT_REGISTER, hass.data[DOMAIN].service_event_register_modify, schema=SERVICE_KNX_EVENT_REGISTER_SCHEMA, ) async def reload_service_handler(service_call: ServiceCallType) -> None: """Remove all KNX components and load new ones from config.""" # First check for config file. If for some reason it is no longer there # or knx is no longer mentioned, stop the reload. config = await async_integration_yaml_config(hass, DOMAIN) if not config or DOMAIN not in config: return await hass.data[DOMAIN].xknx.stop() await asyncio.gather(*[ platform.async_reset() for platform in async_get_platforms(hass, DOMAIN) ]) await async_setup(hass, config) async_register_admin_service(hass, DOMAIN, SERVICE_RELOAD, reload_service_handler, schema=vol.Schema({})) return True
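# Hypothetical example of calling the knx.send service registered above (assuming
# SERVICE_KNX_SEND == "send"). The group address and raw payload are placeholders and
# depend entirely on the installation.
async def example_knx_send(hass):
    """Write a raw 2-byte payload to a placeholder KNX group address."""
    await hass.services.async_call(
        "knx", "send",
        {"address": "1/0/15", "payload": [0, 4]},
        blocking=True,
    )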
async def async_setup(hass, config): # pylint: disable=unused-argument """Set up this component.""" _LOGGER.info(STARTUP) config_dir = hass.config.path() github_token = config[DOMAIN]["token"] # Configure HACS await configure_hacs(hass, github_token, config_dir) # Check if custom_updater exists for location in CUSTOM_UPDATER_LOCATIONS: if os.path.exists(location.format(config_dir)): msg = CUSTOM_UPDATER_WARNING.format(location.format(config_dir)) _LOGGER.critical(msg) return False # Check if HA is the required version. if parse_version(HAVERSION) < parse_version('0.92.0'): _LOGGER.critical( "You need HA version 92 or newer to use this integration.") return False # Add sensor hass.async_create_task( discovery.async_load_platform(hass, "sensor", DOMAIN, {}, config[DOMAIN])) # Setup startup tasks hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, hacs().startup_tasks()) # Register the views hass.http.register_view(HacsStaticView()) hass.http.register_view(HacsErrorView()) hass.http.register_view(HacsPluginView()) hass.http.register_view(HacsStoreView()) hass.http.register_view(HacsOverviewView()) hass.http.register_view(HacsSettingsView()) hass.http.register_view(HacsRepositoryView()) hass.http.register_view(HacsAPIView()) # Add to sidepanel # TODO: Remove this check when minimum HA version is > 0.94 if parse_version(HAVERSION) < parse_version('0.93.9'): await hass.components.frontend.async_register_built_in_panel( "iframe", IFRAME["title"], IFRAME["icon"], IFRAME["path"], {"url": hacs.url_path["overview"]}, require_admin=IFRAME["require_admin"], ) else: hass.components.frontend.async_register_built_in_panel( "iframe", IFRAME["title"], IFRAME["icon"], IFRAME["path"], {"url": hacs.url_path["overview"]}, require_admin=IFRAME["require_admin"], ) # Mischief managed! return True
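# Stand-alone illustration of the version gate used above. parse_version is assumed to behave
# like packaging.version.parse; HAVERSION is the running Home Assistant version string.
from packaging.version import parse as parse_version


def ha_version_ok(haversion: str, minimum: str = "0.92.0") -> bool:
    """Return True when the running Home Assistant meets the minimum version."""
    return parse_version(haversion) >= parse_version(minimum)


assert ha_version_ok("0.93.1")
assert not ha_version_ok("0.91.4")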
async def async_setup(hass, config): """Set up the Ness Alarm platform.""" from nessclient import Client, ArmingState conf = config[DOMAIN] zones = conf[CONF_ZONES] host = conf[CONF_DEVICE_HOST] port = conf[CONF_DEVICE_PORT] scan_interval = conf[CONF_SCAN_INTERVAL] client = Client(host=host, port=port, loop=hass.loop, update_interval=scan_interval.total_seconds()) hass.data[DATA_NESS] = client async def _close(event): await client.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close) hass.async_create_task( async_load_platform(hass, 'binary_sensor', DOMAIN, {CONF_ZONES: zones}, config)) hass.async_create_task( async_load_platform(hass, 'alarm_control_panel', DOMAIN, {}, config)) def on_zone_change(zone_id: int, state: bool): """Receives and propagates zone state updates.""" async_dispatcher_send(hass, SIGNAL_ZONE_CHANGED, ZoneChangedData( zone_id=zone_id, state=state, )) def on_state_change(arming_state: ArmingState): """Receives and propagates arming state updates.""" async_dispatcher_send(hass, SIGNAL_ARMING_STATE_CHANGED, arming_state) client.on_zone_change(on_zone_change) client.on_state_change(on_state_change) # Force update for current arming status and current zone states hass.loop.create_task(client.keepalive()) hass.loop.create_task(client.update()) async def handle_panic(call): await client.panic(call.data[ATTR_CODE]) async def handle_aux(call): await client.aux(call.data[ATTR_OUTPUT_ID], call.data[ATTR_STATE]) hass.services.async_register(DOMAIN, SERVICE_PANIC, handle_panic, schema=SERVICE_SCHEMA_PANIC) hass.services.async_register(DOMAIN, SERVICE_AUX, handle_aux, schema=SERVICE_SCHEMA_AUX) return True
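# Illustrative only: the ZoneChangedData payload sent over the dispatcher in on_zone_change()
# could be as simple as this dataclass. The real definition lives elsewhere in the integration
# and may differ.
from dataclasses import dataclass


@dataclass
class ZoneChangedData:
    """Zone id and its new open/closed state, as pushed by nessclient."""
    zone_id: int
    state: bool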
async def async_setup_ipcamera(cam_config): """Set up an IP camera.""" host = cam_config[CONF_HOST] username = cam_config.get(CONF_USERNAME) password = cam_config.get(CONF_PASSWORD) name = cam_config[CONF_NAME] interval = cam_config[CONF_SCAN_INTERVAL] switches = cam_config.get(CONF_SWITCHES) sensors = cam_config.get(CONF_SENSORS) motion = cam_config.get(CONF_MOTION_SENSOR) # Init ip webcam cam = PyDroidIPCam( hass.loop, websession, host, cam_config[CONF_PORT], username=username, password=password, timeout=cam_config[CONF_TIMEOUT], ) if switches is None: switches = [ setting for setting in cam.enabled_settings if setting in SWITCHES ] if sensors is None: sensors = [ sensor for sensor in cam.enabled_sensors if sensor in SENSORS ] sensors.extend(["audio_connections", "video_connections"]) if motion is None: motion = "motion_active" in cam.enabled_sensors async def async_update_data(now): """Update data from IP camera in SCAN_INTERVAL.""" await cam.update() async_dispatcher_send(hass, SIGNAL_UPDATE_DATA, host) async_track_point_in_utc_time(hass, async_update_data, utcnow() + interval) await async_update_data(None) # Load platforms webcams[host] = cam mjpeg_camera = { CONF_PLATFORM: "mjpeg", CONF_MJPEG_URL: cam.mjpeg_url, CONF_STILL_IMAGE_URL: cam.image_url, CONF_NAME: name, } if username and password: mjpeg_camera.update({ CONF_USERNAME: username, CONF_PASSWORD: password }) hass.async_create_task( discovery.async_load_platform(hass, "camera", "mjpeg", mjpeg_camera, config)) if sensors: hass.async_create_task( discovery.async_load_platform( hass, "sensor", DOMAIN, { CONF_NAME: name, CONF_HOST: host, CONF_SENSORS: sensors }, config, )) if switches: hass.async_create_task( discovery.async_load_platform( hass, "switch", DOMAIN, { CONF_NAME: name, CONF_HOST: host, CONF_SWITCHES: switches }, config, )) if motion: hass.async_create_task( discovery.async_load_platform( hass, "binary_sensor", DOMAIN, { CONF_HOST: host, CONF_NAME: name }, config, ))
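# Sketch of how a platform entity could subscribe to the SIGNAL_UPDATE_DATA dispatcher signal
# fired by async_update_data() above. The signal constant and the entity shape here are
# assumptions used only for illustration.
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity

SIGNAL_UPDATE_DATA = "android_ip_webcam_update"  # placeholder for the component's constant


class ExampleIPWebcamEntity(Entity):
    """Refresh its state every time the shared camera object is updated."""

    def __init__(self, host):
        self._host = host

    async def async_added_to_hass(self):
        @callback
        def _refresh(host):
            # Only react to updates for the camera this entity belongs to.
            if host == self._host:
                self.async_write_ha_state()

        self.async_on_remove(
            async_dispatcher_connect(self.hass, SIGNAL_UPDATE_DATA, _refresh))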
async def setup_monitored_vars(hass, config, monitored_vars): """Set up requested sensors.""" gw_vars = hass.data[DATA_OPENTHERM_GW][DATA_GW_VARS] sensor_type_map = { COMP_BINARY_SENSOR: [ gw_vars.DATA_MASTER_CH_ENABLED, gw_vars.DATA_MASTER_DHW_ENABLED, gw_vars.DATA_MASTER_COOLING_ENABLED, gw_vars.DATA_MASTER_OTC_ENABLED, gw_vars.DATA_MASTER_CH2_ENABLED, gw_vars.DATA_SLAVE_FAULT_IND, gw_vars.DATA_SLAVE_CH_ACTIVE, gw_vars.DATA_SLAVE_DHW_ACTIVE, gw_vars.DATA_SLAVE_FLAME_ON, gw_vars.DATA_SLAVE_COOLING_ACTIVE, gw_vars.DATA_SLAVE_CH2_ACTIVE, gw_vars.DATA_SLAVE_DIAG_IND, gw_vars.DATA_SLAVE_DHW_PRESENT, gw_vars.DATA_SLAVE_CONTROL_TYPE, gw_vars.DATA_SLAVE_COOLING_SUPPORTED, gw_vars.DATA_SLAVE_DHW_CONFIG, gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP, gw_vars.DATA_SLAVE_CH2_PRESENT, gw_vars.DATA_SLAVE_SERVICE_REQ, gw_vars.DATA_SLAVE_REMOTE_RESET, gw_vars.DATA_SLAVE_LOW_WATER_PRESS, gw_vars.DATA_SLAVE_GAS_FAULT, gw_vars.DATA_SLAVE_AIR_PRESS_FAULT, gw_vars.DATA_SLAVE_WATER_OVERTEMP, gw_vars.DATA_REMOTE_TRANSFER_DHW, gw_vars.DATA_REMOTE_TRANSFER_MAX_CH, gw_vars.DATA_REMOTE_RW_DHW, gw_vars.DATA_REMOTE_RW_MAX_CH, gw_vars.DATA_ROVRD_MAN_PRIO, gw_vars.DATA_ROVRD_AUTO_PRIO, gw_vars.OTGW_GPIO_A_STATE, gw_vars.OTGW_GPIO_B_STATE, gw_vars.OTGW_IGNORE_TRANSITIONS, gw_vars.OTGW_OVRD_HB, ], COMP_SENSOR: [ gw_vars.DATA_CONTROL_SETPOINT, gw_vars.DATA_MASTER_MEMBERID, gw_vars.DATA_SLAVE_MEMBERID, gw_vars.DATA_SLAVE_OEM_FAULT, gw_vars.DATA_COOLING_CONTROL, gw_vars.DATA_CONTROL_SETPOINT_2, gw_vars.DATA_ROOM_SETPOINT_OVRD, gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD, gw_vars.DATA_SLAVE_MAX_CAPACITY, gw_vars.DATA_SLAVE_MIN_MOD_LEVEL, gw_vars.DATA_ROOM_SETPOINT, gw_vars.DATA_REL_MOD_LEVEL, gw_vars.DATA_CH_WATER_PRESS, gw_vars.DATA_DHW_FLOW_RATE, gw_vars.DATA_ROOM_SETPOINT_2, gw_vars.DATA_ROOM_TEMP, gw_vars.DATA_CH_WATER_TEMP, gw_vars.DATA_DHW_TEMP, gw_vars.DATA_OUTSIDE_TEMP, gw_vars.DATA_RETURN_WATER_TEMP, gw_vars.DATA_SOLAR_STORAGE_TEMP, gw_vars.DATA_SOLAR_COLL_TEMP, gw_vars.DATA_CH_WATER_TEMP_2, gw_vars.DATA_DHW_TEMP_2, gw_vars.DATA_EXHAUST_TEMP, gw_vars.DATA_SLAVE_DHW_MAX_SETP, gw_vars.DATA_SLAVE_DHW_MIN_SETP, gw_vars.DATA_SLAVE_CH_MAX_SETP, gw_vars.DATA_SLAVE_CH_MIN_SETP, gw_vars.DATA_DHW_SETPOINT, gw_vars.DATA_MAX_CH_SETPOINT, gw_vars.DATA_OEM_DIAG, gw_vars.DATA_TOTAL_BURNER_STARTS, gw_vars.DATA_CH_PUMP_STARTS, gw_vars.DATA_DHW_PUMP_STARTS, gw_vars.DATA_DHW_BURNER_STARTS, gw_vars.DATA_TOTAL_BURNER_HOURS, gw_vars.DATA_CH_PUMP_HOURS, gw_vars.DATA_DHW_PUMP_HOURS, gw_vars.DATA_DHW_BURNER_HOURS, gw_vars.DATA_MASTER_OT_VERSION, gw_vars.DATA_SLAVE_OT_VERSION, gw_vars.DATA_MASTER_PRODUCT_TYPE, gw_vars.DATA_MASTER_PRODUCT_VERSION, gw_vars.DATA_SLAVE_PRODUCT_TYPE, gw_vars.DATA_SLAVE_PRODUCT_VERSION, gw_vars.OTGW_MODE, gw_vars.OTGW_DHW_OVRD, gw_vars.OTGW_ABOUT, gw_vars.OTGW_BUILD, gw_vars.OTGW_CLOCKMHZ, gw_vars.OTGW_LED_A, gw_vars.OTGW_LED_B, gw_vars.OTGW_LED_C, gw_vars.OTGW_LED_D, gw_vars.OTGW_LED_E, gw_vars.OTGW_LED_F, gw_vars.OTGW_GPIO_A, gw_vars.OTGW_GPIO_B, gw_vars.OTGW_SB_TEMP, gw_vars.OTGW_SETP_OVRD_MODE, gw_vars.OTGW_SMART_PWR, gw_vars.OTGW_THRM_DETECT, gw_vars.OTGW_VREF, ] } binary_sensors = [] sensors = [] for var in monitored_vars: if var in sensor_type_map[COMP_SENSOR]: sensors.append(var) elif var in sensor_type_map[COMP_BINARY_SENSOR]: binary_sensors.append(var) else: _LOGGER.error("Monitored variable not supported: %s", var) if binary_sensors: hass.async_create_task( async_load_platform(hass, COMP_BINARY_SENSOR, DOMAIN, binary_sensors, config)) if sensors: hass.async_create_task( async_load_platform(hass, COMP_SENSOR, DOMAIN, 
sensors, config))
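# Minimal stand-alone illustration of the classification loop above: variables found in the
# binary-sensor list become binary sensors, variables in the sensor list become sensors, and
# anything else is reported as unsupported. The variable names below are placeholders.
def partition_vars(monitored, binary_vars, sensor_vars):
    """Split monitored variable names into (binary_sensors, sensors, unknown)."""
    binary, regular, unknown = [], [], []
    for var in monitored:
        if var in sensor_vars:
            regular.append(var)
        elif var in binary_vars:
            binary.append(var)
        else:
            unknown.append(var)
    return binary, regular, unknown


assert partition_vars(["slave_flame_on", "room_temp", "bogus"],
                      {"slave_flame_on"}, {"room_temp"}) == (
    ["slave_flame_on"], ["room_temp"], ["bogus"])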
async def async_setup_entry(hass, config_entry): if DOMAIN not in hass.data: hass.data[DOMAIN] = {} if not config_entry.options: await async_set_options(hass, config_entry) miniserver = MiniServer(hass, config_entry) if not await miniserver.async_setup(): return False for platform in LOXONE_PLATFORMS: _LOGGER.debug("starting loxone {}...".format(platform)) hass.async_create_task( hass.config_entries.async_forward_entry_setup( config_entry, platform)) hass.async_create_task( async_load_platform(hass, platform, DOMAIN, {}, config_entry)) config_entry.add_update_listener(async_config_entry_updated) new_data = _UNDEF if config_entry.unique_id is None: hass.config_entries.async_update_entry(config_entry, unique_id=miniserver.serial, data=new_data) # Workaround await asyncio.sleep(5) hass.data[DOMAIN][config_entry.unique_id] = miniserver await miniserver.async_update_device_registry() async def message_callback(message): """Fire message on HomeAssistant Bus.""" hass.bus.async_fire(EVENT, message) async def handle_websocket_command(call): """Handle websocket command services.""" value = call.data.get(ATTR_VALUE, DEFAULT) device_uuid = call.data.get(ATTR_UUID, DEFAULT) await miniserver.api.send_websocket_command(device_uuid, value) async def loxone_discovered(event): if "component" in event.data: if event.data['component'] == DOMAIN: try: _LOGGER.info("loxone discovered") await asyncio.sleep(0.1) entity_ids = hass.states.async_all() sensors_analog = [] sensors_digital = [] switches = [] covers = [] lights = [] climates = [] for s in entity_ids: s_dict = s.as_dict() attr = s_dict['attributes'] if "plattform" in attr and \ attr['plattform'] == DOMAIN: device_typ = attr.get('device_typ', "") if device_typ == "analog_sensor": sensors_analog.append(s_dict['entity_id']) elif device_typ == "digital_sensor": sensors_digital.append(s_dict['entity_id']) elif device_typ in ["Jalousie", "Gate", "Window"]: covers.append(s_dict['entity_id']) elif device_typ in [ "Switch", "Pushbutton", "TimedSwitch" ]: switches.append(s_dict['entity_id']) elif device_typ in ["LightControllerV2", "Dimmer"]: lights.append(s_dict['entity_id']) elif device_typ == "IRoomControllerV2": climates.append(s_dict['entity_id']) sensors_analog.sort() sensors_digital.sort() covers.sort() switches.sort() lights.sort() climates.sort() await group.Group.async_create_group( hass, "Loxone Analog Sensors", object_id="loxone_analog", entity_ids=sensors_analog) await group.Group.async_create_group( hass, "Loxone Digital Sensors", object_id="loxone_digital", entity_ids=sensors_digital) await group.Group.async_create_group( hass, "Loxone Switches", object_id="loxone_switches", entity_ids=switches) await group.Group.async_create_group( hass, "Loxone Covers", object_id="loxone_covers", entity_ids=covers) await group.Group.async_create_group( hass, "Loxone Lights", object_id="loxone_lights", entity_ids=lights) await group.Group.async_create_group( hass, "Loxone Room Controllers", object_id="loxone_climates", entity_ids=climates) await hass.async_block_till_done() await group.Group.async_create_group( hass, "Loxone Group", object_id="loxone_group", entity_ids=[ "group.loxone_analog", "group.loxone_digital", "group.loxone_switches", "group.loxone_covers", "group.loxone_lights", ]) except: traceback.print_exc() await miniserver.async_set_callback(message_callback) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, miniserver.start_loxone) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, miniserver.stop_loxone) 
hass.bus.async_listen_once(EVENT_COMPONENT_LOADED, loxone_discovered) hass.bus.async_listen(SENDDOMAIN, miniserver.listen_loxone_send) hass.bus.async_listen(SECUREDSENDDOMAIN, miniserver.listen_loxone_send) hass.services.async_register(DOMAIN, 'event_websocket_command', handle_websocket_command) return True
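# Hypothetical call of the loxone.event_websocket_command service registered above. The data
# keys ("uuid", "value") and both values are assumptions used only for illustration.
async def example_loxone_command(hass):
    """Send a placeholder value to a placeholder Loxone control UUID."""
    await hass.services.async_call(
        "loxone", "event_websocket_command",
        {"uuid": "0fd2a132-0123-4567-ffff0a1b2c3d4e5f", "value": "pulse"},
        blocking=True,
    )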
def async_setup(hass, config): """Set up the Satel Integra component.""" conf = config.get(DOMAIN) zones = conf.get(CONF_ZONES) host = conf.get(CONF_DEVICE_HOST) port = conf.get(CONF_DEVICE_PORT) partition = conf.get(CONF_DEVICE_PARTITION) from satel_integra.satel_integra import AsyncSatel, AlarmState controller = AsyncSatel(host, port, zones, hass.loop, partition) hass.data[DATA_SATEL] = controller result = yield from controller.connect() if not result: return False @asyncio.coroutine def _close(): controller.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close()) _LOGGER.debug("Arm home config: %s, mode: %s ", conf, conf.get(CONF_ARM_HOME_MODE)) task_control_panel = hass.async_add_job( async_load_platform(hass, 'alarm_control_panel', DOMAIN, conf, config)) task_zones = hass.async_add_job( async_load_platform(hass, 'binary_sensor', DOMAIN, {CONF_ZONES: zones}, config)) yield from asyncio.wait([task_control_panel, task_zones], loop=hass.loop) @callback def alarm_status_update_callback(status): """Send status update received from alarm to home assistant.""" _LOGGER.debug("Alarm status callback, status: %s", status) hass_alarm_status = STATE_ALARM_DISARMED if status == AlarmState.ARMED_MODE0: hass_alarm_status = STATE_ALARM_ARMED_AWAY elif status in [ AlarmState.ARMED_MODE0, AlarmState.ARMED_MODE1, AlarmState.ARMED_MODE2, AlarmState.ARMED_MODE3 ]: hass_alarm_status = STATE_ALARM_ARMED_HOME elif status in [AlarmState.TRIGGERED, AlarmState.TRIGGERED_FIRE]: hass_alarm_status = STATE_ALARM_TRIGGERED elif status == AlarmState.DISARMED: hass_alarm_status = STATE_ALARM_DISARMED _LOGGER.debug("Sending hass_alarm_status: %s...", hass_alarm_status) async_dispatcher_send(hass, SIGNAL_PANEL_MESSAGE, hass_alarm_status) @callback def zones_update_callback(status): """Update zone objects as per notification from the alarm.""" _LOGGER.debug("Zones callback , status: %s", status) async_dispatcher_send(hass, SIGNAL_ZONES_UPDATED, status[ZONES]) # Create a task instead of adding a tracking job, since this task will # run until the connection to satel_integra is closed. hass.loop.create_task(controller.keep_alive()) hass.loop.create_task( controller.monitor_status(alarm_status_update_callback, zones_update_callback)) return True
async def async_setup(hass, hass_config): """Set up the Locative component.""" hass.async_create_task( async_load_platform(hass, 'device_tracker', DOMAIN, {}, hass_config)) return True
async def async_setup(hass: HomeAssistantType, hass_config: dict): config: dict = hass_config[DOMAIN] # init debug if needed if config[CONF_DEBUG]: utils.YandexDebug(hass, _LOGGER) cachefile = hass.config.path(f".{DOMAIN}.json") # a dedicated session with its own cookies is needed session = async_create_clientsession(hass) quasar = YandexQuasar(session) # if login/password are set - start the cloud connection if CONF_USERNAME in config and CONF_PASSWORD in config: devices = await quasar.init(config[CONF_USERNAME], config[CONF_PASSWORD], cachefile) # if only a token is set - local mode only elif CONF_TOKEN in config: devices = await quasar.load_local_speakers(config[CONF_TOKEN]) else: await utils.error(hass, "Either login/password or token is required") return False if not devices: await utils.error(hass, "There are no devices in the account") return False confdevices = config.get(CONF_DEVICES) if confdevices: for device in devices: did = device['device_id'] if did in confdevices: device.update(confdevices[did]) if 'host' in device: await quasar.init_local(cachefile) utils.clean_v1(hass.config) # create send_command service async def send_command(call: ServiceCall): data = dict(call.data) device = data.pop('device', None) entity_ids = (data.pop(ATTR_ENTITY_ID, None) or utils.find_station(devices, device)) _LOGGER.debug(f"Send command to: {entity_ids}") if not entity_ids: _LOGGER.error("Entity_id parameter required") return data = { ATTR_ENTITY_ID: entity_ids, ATTR_MEDIA_CONTENT_ID: data.get('text'), ATTR_MEDIA_CONTENT_TYPE: 'dialog', } if data.get('command') == 'dialog' else { ATTR_ENTITY_ID: entity_ids, ATTR_MEDIA_CONTENT_ID: json.dumps(data), ATTR_MEDIA_CONTENT_TYPE: 'json', } await hass.services.async_call(DOMAIN_MP, SERVICE_PLAY_MEDIA, data, blocking=True) hass.services.async_register(DOMAIN, 'send_command', send_command) # create TTS service async def yandex_station_say(call: ServiceCall): entity_ids = (call.data.get(ATTR_ENTITY_ID) or utils.find_station(devices)) _LOGGER.debug(f"Yandex say to: {entity_ids}") if not entity_ids: _LOGGER.error("Entity_id parameter required") return message = call.data.get('message') data = { ATTR_MEDIA_CONTENT_ID: message, ATTR_MEDIA_CONTENT_TYPE: 'tts', ATTR_ENTITY_ID: entity_ids, } await hass.services.async_call(DOMAIN_MP, SERVICE_PLAY_MEDIA, data, blocking=True) hass.services.async_register('tts', config[CONF_TTS_NAME], yandex_station_say) hass.data[DOMAIN] = {'quasar': quasar, 'devices': devices} # create all the speakers when using a cloud connection if quasar.main_token: # set up every speaker in cloud mode for device in devices: info = { 'device_id': device['device_id'], 'name': device['name'], 'platform': device['platform'] } _LOGGER.debug(f"Initializing: {info}") hass.async_create_task( discovery.async_load_platform(hass, DOMAIN_MP, DOMAIN, device['device_id'], hass_config)) # create the utility media player if CONF_INTENTS in config: intents: dict = config[CONF_INTENTS] hass.async_create_task( discovery.async_load_platform(hass, DOMAIN_MP, DOMAIN, list(intents.keys()), hass_config)) if quasar.hass_id: for i, intent in enumerate(intents.keys(), 1): try: await quasar.add_intent(intent, intents[intent], i) except: pass # create Yandex smart home devices (only air conditioners for now) if CONF_INCLUDE in config: for device in quasar.devices: if device['name'] in config[CONF_INCLUDE]: hass.async_create_task( discovery.async_load_platform(hass, 'climate', DOMAIN, device, hass_config)) async def found_local_device(info: dict): """Message from Zeroconf (mDNS). 
:param info: {device_id, platform, host, port} """ _LOGGER.debug(f"mDNS: {info}") await quasar.init_local(cachefile) for device in devices: if info['device_id'] != device['device_id']: continue device['host'] = info['host'] device['port'] = info['port'] if 'entity' not in device: hass.async_create_task( discovery.async_load_platform(hass, DOMAIN_MP, DOMAIN, device['device_id'], hass_config)) elif device['entity']: await device['entity'].init_local_mode() break zeroconf = await utils.get_zeroconf_singleton(hass) listener = YandexIOListener(hass.loop) listener.start(found_local_device, zeroconf) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, listener.stop) return True
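# Hypothetical example of the yandex_station.send_command service registered above, using the
# "dialog" form that plays a phrase through the media_player platform. The entity_id and text
# are placeholders.
async def example_yandex_dialog(hass):
    """Ask a placeholder Yandex speaker to say a phrase."""
    await hass.services.async_call(
        "yandex_station", "send_command",
        {"entity_id": "media_player.yandex_station", "command": "dialog",
         "text": "Hello from Home Assistant"},
        blocking=True,
    )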
def discover_device(component, device, dict_id): """Put device in a dictionary and call discovery on it.""" self._hass.data[DATA_ZWAVE_DICT][dict_id] = device yield from discovery.async_load_platform( self._hass, component, DOMAIN, {const.DISCOVERY_DEVICE: dict_id}, self._zwave_config)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Amcrest IP Camera component.""" hass.data.setdefault(DATA_AMCREST, {DEVICES: {}, CAMERAS: []}) for device in config[DOMAIN]: name: str = device[CONF_NAME] username: str = device[CONF_USERNAME] password: str = device[CONF_PASSWORD] api = AmcrestChecker(hass, name, device[CONF_HOST], device[CONF_PORT], username, password) ffmpeg_arguments = device[CONF_FFMPEG_ARGUMENTS] resolution = RESOLUTION_LIST[device[CONF_RESOLUTION]] binary_sensors = device.get(CONF_BINARY_SENSORS) sensors = device.get(CONF_SENSORS) switches = device.get(CONF_SWITCHES) stream_source = device[CONF_STREAM_SOURCE] control_light = device.get(CONF_CONTROL_LIGHT) # currently aiohttp only works with basic authentication # only valid for mjpeg streaming if device[CONF_AUTHENTICATION] == HTTP_BASIC_AUTHENTICATION: authentication: aiohttp.BasicAuth | None = aiohttp.BasicAuth( username, password) else: authentication = None hass.data[DATA_AMCREST][DEVICES][name] = AmcrestDevice( api, authentication, ffmpeg_arguments, stream_source, resolution, control_light, ) hass.async_create_task( discovery.async_load_platform(hass, Platform.CAMERA, DOMAIN, {CONF_NAME: name}, config)) event_codes = set() if binary_sensors: hass.async_create_task( discovery.async_load_platform( hass, Platform.BINARY_SENSOR, DOMAIN, { CONF_NAME: name, CONF_BINARY_SENSORS: binary_sensors }, config, )) event_codes = { sensor.event_code for sensor in BINARY_SENSORS if sensor.key in binary_sensors and not sensor.should_poll and sensor.event_code is not None } _start_event_monitor(hass, name, api, event_codes) if sensors: hass.async_create_task( discovery.async_load_platform( hass, Platform.SENSOR, DOMAIN, { CONF_NAME: name, CONF_SENSORS: sensors }, config, )) if switches: hass.async_create_task( discovery.async_load_platform( hass, Platform.SWITCH, DOMAIN, { CONF_NAME: name, CONF_SWITCHES: switches }, config, )) if not hass.data[DATA_AMCREST][DEVICES]: return False def have_permission(user: User | None, entity_id: str) -> bool: return not user or user.permissions.check_entity( entity_id, POLICY_CONTROL) async def async_extract_from_service(call: ServiceCall) -> list[str]: if call.context.user_id: user = await hass.auth.async_get_user(call.context.user_id) if user is None: raise UnknownUser(context=call.context) else: user = None if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_ALL: # Return all entity_ids user has permission to control. return [ entity_id for entity_id in hass.data[DATA_AMCREST][CAMERAS] if have_permission(user, entity_id) ] if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_NONE: return [] call_ids = await async_extract_entity_ids(hass, call) entity_ids = [] for entity_id in hass.data[DATA_AMCREST][CAMERAS]: if entity_id not in call_ids: continue if not have_permission(user, entity_id): raise Unauthorized(context=call.context, entity_id=entity_id, permission=POLICY_CONTROL) entity_ids.append(entity_id) return entity_ids async def async_service_handler(call: ServiceCall) -> None: args = [] for arg in CAMERA_SERVICES[call.service][2]: args.append(call.data[arg]) for entity_id in await async_extract_from_service(call): async_dispatcher_send(hass, service_signal(call.service, entity_id), *args) for service, params in CAMERA_SERVICES.items(): hass.services.async_register(DOMAIN, service, async_service_handler, params[0]) return True
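# Simplified stand-alone illustration of the target-resolution logic in
# async_extract_from_service above: "all" expands to every known camera the caller may control,
# "none" to nothing, and an explicit list is intersected with the known cameras. Unlike the real
# handler, this sketch filters instead of raising Unauthorized; names are placeholders.
def resolve_targets(requested, known_cameras, can_control):
    """Return the camera entity_ids a service call should fan out to."""
    if requested == "all":
        return [eid for eid in known_cameras if can_control(eid)]
    if requested == "none":
        return []
    return [eid for eid in known_cameras
            if eid in requested and can_control(eid)]


cams = ["camera.front_door", "camera.backyard"]
assert resolve_targets("all", cams, lambda e: True) == cams
assert resolve_targets(["camera.backyard"], cams, lambda e: True) == ["camera.backyard"]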
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the demo environment.""" if DOMAIN not in config: return True if not hass.config_entries.async_entries(DOMAIN): hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={})) # Set up demo platforms for platform in COMPONENTS_WITH_DEMO_PLATFORM: hass.async_create_task( async_load_platform(hass, platform, DOMAIN, {}, config)) config.setdefault(ha.DOMAIN, {}) config.setdefault(DOMAIN, {}) # Set up sun if not hass.config.latitude: hass.config.latitude = 32.87336 if not hass.config.longitude: hass.config.longitude = 117.22743 tasks = [setup.async_setup_component(hass, "sun", config)] # Set up input select tasks.append( setup.async_setup_component( hass, "input_select", { "input_select": { "living_room_preset": { "options": ["Visitors", "Visitors with kids", "Home Alone"] }, "who_cooks": { "icon": "mdi:panda", "initial": "Anne Therese", "name": "Cook today", "options": ["Paulus", "Anne Therese"], }, } }, )) # Set up input boolean tasks.append( setup.async_setup_component( hass, "input_boolean", { "input_boolean": { "notify": { "icon": "mdi:car", "initial": False, "name": "Notify Anne Therese is home", } } }, )) # Set up input button tasks.append( setup.async_setup_component( hass, "input_button", { "input_button": { "bell": { "icon": "mdi:bell-ring-outline", "name": "Ring bell", } } }, )) # Set up input number tasks.append( setup.async_setup_component( hass, "input_number", { "input_number": { "noise_allowance": { "icon": "mdi:bell-ring", "min": 0, "max": 10, "name": "Allowed Noise", "unit_of_measurement": SOUND_PRESSURE_DB, } } }, )) results = await asyncio.gather(*tasks) if any(not result for result in results): return False # Set up example persistent notification persistent_notification.async_create( hass, "This is an example of a persistent notification.", title="Example Notification", ) async def demo_start_listener(_event: Event) -> None: """Finish set up.""" await finish_setup(hass, config) hass.bus.async_listen(EVENT_HOMEASSISTANT_START, demo_start_listener) # Create issues async_create_issue( hass, DOMAIN, "transmogrifier_deprecated", breaks_in_ha_version="2023.1.1", is_fixable=False, learn_more_url="https://en.wiktionary.org/wiki/transmogrifier", severity=IssueSeverity.WARNING, translation_key="transmogrifier_deprecated", ) async_create_issue( hass, DOMAIN, "out_of_blinker_fluid", breaks_in_ha_version="2023.1.1", is_fixable=True, learn_more_url="https://www.youtube.com/watch?v=b9rntRxLlbU", severity=IssueSeverity.CRITICAL, translation_key="out_of_blinker_fluid", ) async_create_issue( hass, DOMAIN, "unfixable_problem", is_fixable=False, learn_more_url="https://www.youtube.com/watch?v=dQw4w9WgXcQ", severity=IssueSeverity.WARNING, translation_key="unfixable_problem", ) async_create_issue( hass, DOMAIN, "bad_psu", is_fixable=True, learn_more_url="https://www.youtube.com/watch?v=b9rntRxLlbU", severity=IssueSeverity.CRITICAL, translation_key="bad_psu", ) return True
async def async_setup(hass, config): """Set up the Satel Integra component.""" conf = config.get(DOMAIN) zones = conf.get(CONF_ZONES) outputs = conf.get(CONF_OUTPUTS) switchable_outputs = conf.get(CONF_SWITCHABLE_OUTPUTS) host = conf.get(CONF_HOST) port = conf.get(CONF_PORT) partitions = conf.get(CONF_DEVICE_PARTITIONS) monitored_outputs = collections.OrderedDict( list(outputs.items()) + list(switchable_outputs.items())) controller = AsyncSatel(host, port, hass.loop, zones, monitored_outputs, partitions) hass.data[DATA_SATEL] = controller result = await controller.connect() if not result: return False async def _close(): controller.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close()) _LOGGER.debug("Arm home config: %s, mode: %s ", conf, conf.get(CONF_ARM_HOME_MODE)) hass.async_create_task( async_load_platform(hass, "alarm_control_panel", DOMAIN, conf, config)) hass.async_create_task( async_load_platform( hass, "binary_sensor", DOMAIN, { CONF_ZONES: zones, CONF_OUTPUTS: outputs }, config, )) hass.async_create_task( async_load_platform( hass, "switch", DOMAIN, { CONF_SWITCHABLE_OUTPUTS: switchable_outputs, CONF_DEVICE_CODE: conf.get(CONF_DEVICE_CODE), }, config, )) @callback def alarm_status_update_callback(): """Send status update received from alarm to home assistant.""" _LOGGER.debug("Sending request to update panel state") async_dispatcher_send(hass, SIGNAL_PANEL_MESSAGE) @callback def zones_update_callback(status): """Update zone objects as per notification from the alarm.""" _LOGGER.debug("Zones callback, status: %s", status) async_dispatcher_send(hass, SIGNAL_ZONES_UPDATED, status[ZONES]) @callback def outputs_update_callback(status): """Update zone objects as per notification from the alarm.""" _LOGGER.debug("Outputs updated callback , status: %s", status) async_dispatcher_send(hass, SIGNAL_OUTPUTS_UPDATED, status["outputs"]) # Create a task instead of adding a tracking job, since this task will # run until the connection to satel_integra is closed. hass.loop.create_task(controller.keep_alive()) hass.loop.create_task( controller.monitor_status(alarm_status_update_callback, zones_update_callback, outputs_update_callback)) return True
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Create a (EMEA/EU-based) Honeywell TCC system.""" async def load_auth_tokens(store) -> Tuple[Dict, Optional[Dict]]: app_storage = await store.async_load() tokens = dict(app_storage if app_storage else {}) if tokens.pop(CONF_USERNAME, None) != config[DOMAIN][CONF_USERNAME]: # any tokens won't be valid, and store might be be corrupt await store.async_save({}) return ({}, None) # evohomeasync2 requires naive/local datetimes as strings if tokens.get(ACCESS_TOKEN_EXPIRES) is not None: tokens[ACCESS_TOKEN_EXPIRES] = _dt_aware_to_naive( dt_util.parse_datetime(tokens[ACCESS_TOKEN_EXPIRES])) user_data = tokens.pop(USER_DATA, None) return (tokens, user_data) store = hass.helpers.storage.Store(STORAGE_VER, STORAGE_KEY) tokens, user_data = await load_auth_tokens(store) client_v2 = evohomeasync2.EvohomeClient( config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD], **tokens, session=async_get_clientsession(hass), ) try: await client_v2.login() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) return False finally: config[DOMAIN][CONF_PASSWORD] = "REDACTED" loc_idx = config[DOMAIN][CONF_LOCATION_IDX] try: loc_config = client_v2.installation_info[loc_idx] except IndexError: _LOGGER.error( "Config error: '%s' = %s, but the valid range is 0-%s. " "Unable to continue. Fix any configuration errors and restart HA", CONF_LOCATION_IDX, loc_idx, len(client_v2.installation_info) - 1, ) return False if _LOGGER.isEnabledFor(logging.DEBUG): _config = {"locationInfo": {"timeZone": None}, GWS: [{TCS: None}]} _config["locationInfo"]["timeZone"] = loc_config["locationInfo"][ "timeZone"] _config[GWS][0][TCS] = loc_config[GWS][0][TCS] _LOGGER.debug("Config = %s", _config) client_v1 = evohomeasync.EvohomeClient( client_v2.username, client_v2.password, user_data=user_data, session=async_get_clientsession(hass), ) hass.data[DOMAIN] = {} hass.data[DOMAIN]["broker"] = broker = EvoBroker(hass, client_v2, client_v1, store, config[DOMAIN]) await broker.save_auth_tokens() await broker.async_update() # get initial state hass.async_create_task( async_load_platform(hass, "climate", DOMAIN, {}, config)) if broker.tcs.hotwater: hass.async_create_task( async_load_platform(hass, "water_heater", DOMAIN, {}, config)) hass.helpers.event.async_track_time_interval( broker.async_update, config[DOMAIN][CONF_SCAN_INTERVAL]) setup_service_functions(hass, broker) return True
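# The _dt_aware_to_naive helper used above is not shown in this snippet; a minimal sketch of
# what such a conversion could look like (an assumption, not necessarily the integration's
# implementation). evohomeasync2 wants naive local datetimes, so convert to local time and
# drop the tzinfo.
from datetime import datetime

import homeassistant.util.dt as dt_util


def _dt_aware_to_naive(dt_aware: datetime) -> datetime:
    """Return the aware datetime as a naive datetime in the local timezone."""
    return dt_util.as_local(dt_aware).replace(tzinfo=None)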
async def async_setup(hass, config): """Set up this component using YAML.""" async def handle_add_date(call): """Handle the add_date service call.""" for entity_id in call.data.get(CONF_ENTITY_ID): collection_date = call.data.get(CONF_DATE) _LOGGER.debug("called add_date %s from %s", collection_date, entity_id) try: entity = hass.data[DOMAIN][SENSOR_PLATFORM][entity_id] await entity.add_date(collection_date) except Exception as err: _LOGGER.error("Failed adding date for %s - %s", entity_id, err) async def handle_remove_date(call): """Handle the remove_date service call.""" for entity_id in call.data.get(CONF_ENTITY_ID): collection_date = call.data.get(CONF_DATE) _LOGGER.debug("called remove_date %s from %s", collection_date, entity_id) try: entity = hass.data[DOMAIN][SENSOR_PLATFORM][entity_id] await entity.remove_date(collection_date) except Exception as err: _LOGGER.error("Failed removing date for %s - %s", entity_id, err) async def handle_offset_date(call): """Handle the offset_date service call.""" for entity_id in call.data.get(CONF_ENTITY_ID): offset = call.data.get(CONF_OFFSET) collection_date = call.data.get(CONF_DATE) _LOGGER.debug( "called offset_date %s by %d days for %s", collection_date, offset, entity_id, ) try: new_date = collection_date + relativedelta(days=offset) except Exception as err: _LOGGER.error("Failed to offset the date - %s", err) break try: entity = hass.data[DOMAIN][SENSOR_PLATFORM][entity_id] await entity.remove_date(collection_date) await entity.add_date(new_date) except Exception as err: _LOGGER.error("Failed offsetting date for %s - %s", entity_id, err) async def handle_update_state(call): """Handle the update_state service call.""" for entity_id in call.data.get(CONF_ENTITY_ID): _LOGGER.debug("called update_state for %s", entity_id) try: entity = hass.data[DOMAIN][SENSOR_PLATFORM][entity_id] await entity.async_update_state() except Exception as err: _LOGGER.error("Failed updating state for %s - %s", entity_id, err) async def handle_collect_garbage(call): """Handle the collect_garbage service call.""" for entity_id in call.data.get(CONF_ENTITY_ID): last_collection = call.data.get(ATTR_LAST_COLLECTION) _LOGGER.debug("called collect_garbage for %s", entity_id) try: entity = hass.data[DOMAIN][SENSOR_PLATFORM][entity_id] if last_collection is None: entity.last_collection = dt_util.now() else: entity.last_collection = dt_util.as_local(last_collection) await entity.async_update_state() except Exception as err: _LOGGER.error("Failed setting last collection for %s - %s", entity_id, err) if DOMAIN not in hass.services.async_services(): hass.services.async_register(DOMAIN, "collect_garbage", handle_collect_garbage, schema=COLLECT_NOW_SCHEMA) hass.services.async_register(DOMAIN, "update_state", handle_update_state, schema=UPDATE_STATE_SCHEMA) hass.services.async_register(DOMAIN, "add_date", handle_add_date, schema=ADD_REMOVE_DATE_SCHEMA) hass.services.async_register(DOMAIN, "remove_date", handle_remove_date, schema=ADD_REMOVE_DATE_SCHEMA) hass.services.async_register(DOMAIN, "offset_date", handle_offset_date, schema=OFFSET_DATE_SCHEMA) else: _LOGGER.debug("Services already registered") if config.get(DOMAIN) is None: # We get here if the integration is set up using config flow return True platform_config = config[DOMAIN].get(CONF_SENSORS, {}) # If platform is not enabled, skip. 
if not platform_config: return False for entry in platform_config: _LOGGER.debug( "Setting %s(%s) from YAML configuration", entry[CONF_NAME], entry[CONF_FREQUENCY], ) # If entry is not enabled, skip. # if not entry[CONF_ENABLED]: # continue hass.async_create_task( discovery.async_load_platform(hass, SENSOR_PLATFORM, DOMAIN, entry, config)) hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={})) return True
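# Stand-alone illustration of the date arithmetic performed by handle_offset_date above;
# the dates and offset are placeholders.
from datetime import date

from dateutil.relativedelta import relativedelta

collection_date = date(2021, 6, 1)
offset = 3
assert collection_date + relativedelta(days=offset) == date(2021, 6, 4)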