def validate_payload(payload, data_type):
    """Validate an OwnTracks payload.

    Returns the decoded dict when *payload* is valid JSON of the expected
    ``_type`` and passes the GPS-accuracy checks, otherwise ``None``.
    ``max_gps_accuracy`` is read from the enclosing scope.
    """
    try:
        data = json.loads(payload)
    except ValueError:
        # If invalid JSON
        _LOGGER.error('Unable to parse payload as JSON: %s', payload)
        return None

    if not isinstance(data, dict) or data.get('_type') != data_type:
        _LOGGER.debug('Skipping %s update for following data '
                      'because of missing or malformatted data: %s',
                      data_type, data)
        return None

    if max_gps_accuracy is not None and \
            convert(data.get('acc'), float, 0.0) > max_gps_accuracy:
        _LOGGER.debug('Skipping %s update because expected GPS '
                      'accuracy %s is not met: %s',
                      data_type, max_gps_accuracy, data)
        return None

    if convert(data.get('acc'), float, 1.0) == 0.0:
        # BUG FIX: the adjacent literals previously joined to
        # "accuracyis zero" (missing space between them).
        _LOGGER.debug('Skipping %s update because GPS accuracy '
                      'is zero', data_type)
        return None

    return data
def setup(hass, config):
    """Setup zone."""
    created = set()

    for config_key in extract_domain_configs(config, DOMAIN):
        zone_entries = config[config_key]

        # A single mapping is wrapped so both forms iterate the same way.
        if not isinstance(zone_entries, list):
            zone_entries = (zone_entries,)

        for zone_conf in zone_entries:
            name = zone_conf.get(CONF_NAME, DEFAULT_NAME)
            latitude = convert(zone_conf.get(ATTR_LATITUDE), float)
            longitude = convert(zone_conf.get(ATTR_LONGITUDE), float)
            radius = convert(zone_conf.get(ATTR_RADIUS, DEFAULT_RADIUS),
                             float)
            icon = zone_conf.get(ATTR_ICON)
            passive = zone_conf.get(ATTR_PASSIVE, DEFAULT_PASSIVE)

            # Both coordinates are mandatory for a zone.
            if latitude is None or longitude is None:
                logging.getLogger(__name__).error(
                    'Each zone needs a latitude and longitude.')
                continue

            zone = Zone(hass, name, latitude, longitude, radius, icon,
                        passive)
            zone.entity_id = generate_entity_id(ENTITY_ID_FORMAT, name,
                                                created)
            zone.update_ha_state()
            created.add(zone.entity_id)

    # Fall back to a "home" zone built from the core configuration.
    if ENTITY_ID_HOME not in created:
        home = Zone(hass, hass.config.location_name, hass.config.latitude,
                    hass.config.longitude, DEFAULT_RADIUS, ICON_HOME, False)
        home.entity_id = ENTITY_ID_HOME
        home.update_ha_state()

    return True
def closest(self, *args):
    """Find closest entity.

    Closest to home:
      closest(states)
      closest(states.device_tracker)
      closest('group.children')
      closest(states.group.children)

    Closest to a point:
      closest(23.456, 23.456, 'group.children')
      closest('zone.school', 'group.children')
      closest(states.zone.school, 'group.children')
    """
    if len(args) == 1:
        # One argument: measure from the configured home location.
        latitude = self._hass.config.latitude
        longitude = self._hass.config.longitude
        entities = args[0]

    elif len(args) == 2:
        # Two arguments: the first is a state (or entity id) that
        # supplies the reference point.
        point_state = self._resolve_state(args[0])

        if point_state is None:
            _LOGGER.warning('Closest:Unable to find state %s', args[0])
            return None
        elif not loc_helper.has_location(point_state):
            _LOGGER.warning(
                'Closest:State does not contain valid location: %s',
                point_state)
            return None

        latitude = point_state.attributes.get(ATTR_LATITUDE)
        longitude = point_state.attributes.get(ATTR_LONGITUDE)

        entities = args[1]

    else:
        # Three arguments: explicit latitude/longitude plus entities.
        latitude = convert(args[0], float)
        longitude = convert(args[1], float)

        if latitude is None or longitude is None:
            _LOGGER.warning(
                'Closest:Received invalid coordinates: %s, %s',
                args[0], args[1])
            return None

        entities = args[2]

    # Expand the entities argument into a list of state objects;
    # groups are flattened via expand_entity_ids.
    if isinstance(entities, (AllStates, DomainStates)):
        states = list(entities)
    else:
        if isinstance(entities, State):
            gr_entity_id = entities.entity_id
        else:
            gr_entity_id = str(entities)

        states = [self._hass.states.get(entity_id) for entity_id
                  in group.expand_entity_ids(self._hass, [gr_entity_id])]

    return loc_helper.closest(latitude, longitude, states)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Read in all of our configuration, and initialize the loopback switch."""
    if config.get('sink_name') is None:
        _LOGGER.error("Missing required variable: sink_name")
        return False

    if config.get('source_name') is None:
        _LOGGER.error("Missing required variable: source_name")
        return False

    name = convert(config.get('name'), str, DEFAULT_NAME)
    sink_name = config.get('sink_name')
    source_name = config.get('source_name')
    host = convert(config.get('host'), str, DEFAULT_HOST)
    port = convert(config.get('port'), int, DEFAULT_PORT)
    buffer_size = convert(config.get('buffer_size'), int, DEFAULT_BUFFER_SIZE)
    tcp_timeout = convert(config.get('tcp_timeout'), int, DEFAULT_TCP_TIMEOUT)

    # Idiom fix: format on a string instance instead of the unbound
    # str.format; produces the same "host:port" key.
    server_id = '{}:{}'.format(host, port)

    # Reuse one PAServer connection per host:port pair.
    if server_id in _PULSEAUDIO_SERVERS:
        server = _PULSEAUDIO_SERVERS[server_id]
    else:
        server = PAServer(host, port, buffer_size, tcp_timeout)
        _PULSEAUDIO_SERVERS[server_id] = server

    add_devices_callback([PALoopbackSwitch(
        hass, name, server, sink_name, source_name)])
def validate_payload(payload, data_type):
    """Validate an OwnTracks payload.

    Returns the decoded dict when *payload* is valid JSON of the expected
    ``_type`` and passes the GPS-accuracy checks, otherwise ``None``.
    ``max_gps_accuracy`` is read from the enclosing scope.
    """
    try:
        data = json.loads(payload)
    except ValueError:
        # If invalid JSON
        _LOGGER.error('Unable to parse payload as JSON: %s', payload)
        return None

    if not isinstance(data, dict) or data.get('_type') != data_type:
        _LOGGER.debug(
            'Skipping %s update for following data '
            'because of missing or malformatted data: %s',
            data_type, data)
        return None

    if max_gps_accuracy is not None and \
            convert(data.get('acc'), float, 0.0) > max_gps_accuracy:
        _LOGGER.debug(
            'Skipping %s update because expected GPS '
            'accuracy %s is not met: %s',
            data_type, max_gps_accuracy, data)
        return None

    if convert(data.get('acc'), float, 1.0) == 0.0:
        # BUG FIX: the adjacent literals previously joined to
        # "accuracyis zero" (missing space between them).
        _LOGGER.debug('Skipping %s update because GPS accuracy '
                      'is zero', data_type)
        return None

    return data
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Read in all of our configuration, and initialize the loopback switch."""
    if config.get('sink_name') is None:
        _LOGGER.error("Missing required variable: sink_name")
        return False

    if config.get('source_name') is None:
        _LOGGER.error("Missing required variable: source_name")
        return False

    name = convert(config.get('name'), str, DEFAULT_NAME)
    sink_name = config.get('sink_name')
    source_name = config.get('source_name')
    host = convert(config.get('host'), str, DEFAULT_HOST)
    port = convert(config.get('port'), int, DEFAULT_PORT)
    buffer_size = convert(config.get('buffer_size'), int, DEFAULT_BUFFER_SIZE)
    tcp_timeout = convert(config.get('tcp_timeout'), int, DEFAULT_TCP_TIMEOUT)

    # Idiom fix: format on a string instance instead of the unbound
    # str.format; produces the same "host:port" key.
    server_id = '{}:{}'.format(host, port)

    # Reuse one PAServer connection per host:port pair.
    if server_id in _PULSEAUDIO_SERVERS:
        server = _PULSEAUDIO_SERVERS[server_id]
    else:
        server = PAServer(host, port, buffer_size, tcp_timeout)
        _PULSEAUDIO_SERVERS[server_id] = server

    add_devices_callback(
        [PALoopbackSwitch(hass, name, server, sink_name, source_name)])
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup Tellstick sensors."""
    import tellcore.telldus as telldus
    import tellcore.constants as tellcore_constants

    # Map Tellstick datatype constants to a display name and unit.
    sensor_value_descriptions = {
        tellcore_constants.TELLSTICK_TEMPERATURE:
        DatatypeDescription(
            'temperature', config.get('temperature_scale', TEMP_CELSIUS)),
        tellcore_constants.TELLSTICK_HUMIDITY:
        DatatypeDescription('humidity', '%'),
        tellcore_constants.TELLSTICK_RAINRATE:
        DatatypeDescription('rain rate', ''),
        tellcore_constants.TELLSTICK_RAINTOTAL:
        DatatypeDescription('rain total', ''),
        tellcore_constants.TELLSTICK_WINDDIRECTION:
        DatatypeDescription('wind direction', ''),
        tellcore_constants.TELLSTICK_WINDAVERAGE:
        DatatypeDescription('wind average', ''),
        tellcore_constants.TELLSTICK_WINDGUST:
        DatatypeDescription('wind gust', '')
    }

    try:
        core = telldus.TelldusCore()
    except OSError:
        logging.getLogger(__name__).exception(
            'Could not initialize Tellstick.')
        return

    sensors = []
    datatype_mask = util.convert(config.get('datatype_mask'), int, 127)

    for ts_sensor in core.sensors():
        try:
            sensor_name = config[ts_sensor.id]
        except KeyError:
            # Unnamed sensors are skipped when 'only_named' is set.
            if util.convert(config.get('only_named'), bool, False):
                continue
            sensor_name = str(ts_sensor.id)

        # Idiom fix: iterate items() instead of keys() plus an extra
        # dict lookup for every datatype.
        for datatype, sensor_info in sensor_value_descriptions.items():
            if datatype & datatype_mask and ts_sensor.has_value(datatype):
                sensors.append(TellstickSensor(
                    sensor_name, ts_sensor, datatype, sensor_info))

    add_devices(sensors)
def setup(hass, config):
    """Setup the Splunk component."""
    if not validate_config(config, {DOMAIN: ['token']}, _LOGGER):
        _LOGGER.error("You must include the token for your HTTP "
                      "Event Collector input in Splunk.")
        return False

    conf = config[DOMAIN]

    host = conf[CONF_HOST]
    port = util.convert(conf.get(CONF_PORT), int, DEFAULT_PORT)
    token = util.convert(conf.get(CONF_TOKEN), str)
    use_ssl = util.convert(conf.get(CONF_SSL), bool, DEFAULT_SSL)

    # Build the collector endpoint URL once.
    uri_scheme = 'https://' if use_ssl else 'http://'
    event_collector = '{}{}:{}/services/collector/event'.format(
        uri_scheme, host, port)
    headers = {'Authorization': 'Splunk ' + token}

    def splunk_event_listener(event):
        """Listen for new messages on the bus and sends them to Splunk."""
        state = event.data.get('new_state')
        if state is None:
            return

        # Prefer a numeric representation; fall back to the raw state.
        try:
            _state = state_helper.state_as_number(state)
        except ValueError:
            _state = state.state

        json_body = [{
            'domain': state.domain,
            'entity_id': state.object_id,
            'attributes': dict(state.attributes),
            'time': str(event.time_fired),
            'value': _state,
        }]

        try:
            payload = {"host": event_collector, "event": json_body}
            requests.post(event_collector, data=json.dumps(payload),
                          headers=headers)
        except requests.exceptions.RequestException as error:
            _LOGGER.exception('Error saving event to Splunk: %s', error)

    hass.bus.listen(EVENT_STATE_CHANGED, splunk_event_listener)

    return True
def setup(hass, config):
    """Setup the Splunk component."""
    if not validate_config(config, {DOMAIN: ['token']}, _LOGGER):
        _LOGGER.error("You must include the token for your HTTP "
                      "Event Collector input in Splunk.")
        return False

    conf = config[DOMAIN]

    host = conf[CONF_HOST]
    port = util.convert(conf.get(CONF_PORT), int, DEFAULT_PORT)
    token = util.convert(conf.get(CONF_TOKEN), str)
    use_ssl = util.convert(conf.get(CONF_SSL), bool, DEFAULT_SSL)

    # Build the collector endpoint URL once.
    scheme = 'https://' if use_ssl else 'http://'
    event_collector = '{}{}:{}/services/collector/event'.format(
        scheme, host, port)
    headers = {'Authorization': 'Splunk ' + token}

    def splunk_event_listener(event):
        """Listen for new messages on the bus and sends them to Splunk."""
        new_state = event.data.get('new_state')
        if new_state is None:
            return

        # Prefer a numeric representation; fall back to the raw state.
        try:
            value = state_helper.state_as_number(new_state)
        except ValueError:
            value = new_state.state

        json_body = [{
            'domain': new_state.domain,
            'entity_id': new_state.object_id,
            'attributes': dict(new_state.attributes),
            'time': str(event.time_fired),
            'value': value,
        }]

        try:
            payload = {"host": event_collector, "event": json_body}
            requests.post(event_collector, data=json.dumps(payload),
                          headers=headers)
        except requests.exceptions.RequestException as error:
            _LOGGER.exception('Error saving event to Splunk: %s', error)

    hass.bus.listen(EVENT_STATE_CHANGED, splunk_event_listener)

    return True
def _update_hum_sensor(state):
    """Parse humidity sensor value."""
    unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
    humidity = util.convert(state.state, float)

    if humidity is None:
        _LOGGER.error('Unable to parse sensor humidity: %s', state.state)
        return None

    # Only percent readings are supported; log but still return the value.
    if unit != "%":
        _LOGGER.error("Humidity sensor has unsupported unit: %s %s",
                      unit, " (allowed: %)")

    # Out-of-range readings are logged but not rejected.
    if humidity > 100 or humidity < 0:
        _LOGGER.error("Humidity sensor out of range: %s %s",
                      humidity, " (allowed: 0-100%)")

    return humidity
def value_added(node, value):
    """Called when a value is added to a node on the network."""
    for (component,
         discovery_service,
         command_ids,
         value_type,
         value_genre) in DISCOVERY_COMPONENTS:
        # Skip discovery entries that do not match this value's
        # command class, type, or genre.
        if value.command_class not in command_ids:
            continue
        if value_type is not None and value_type != value.type:
            continue
        if value_genre is not None and value_genre != value.genre:
            continue

        # Ensure component is loaded
        bootstrap.setup_component(hass, component, config)

        # Configure node: apply any per-entity polling intensity from
        # the 'customize' mapping; zero/absent disables polling.
        name = "{}.{}".format(component, _object_id(value))
        node_config = customize.get(name, {})
        polling_intensity = convert(
            node_config.get(CONF_POLLING_INTENSITY), int)
        if polling_intensity:
            value.enable_poll(polling_intensity)
        else:
            value.disable_poll()

        # Fire discovery event so the platform picks up this value.
        hass.bus.fire(
            EVENT_PLATFORM_DISCOVERED, {
                ATTR_SERVICE: discovery_service,
                ATTR_DISCOVERED: {
                    ATTR_NODE_ID: node.node_id,
                    ATTR_VALUE_ID: value.value_id,
                }
            })
def distance(self, *args):
    """Calculate distance.

    Will calculate distance from home to a point or between points.
    Points can be passed in using state objects or lat/lng coordinates.
    """
    coords = []
    pending = list(args)

    while pending:
        item = pending.pop(0)

        if isinstance(item, State):
            lat = item.attributes.get(ATTR_LATITUDE)
            lon = item.attributes.get(ATTR_LONGITUDE)

            if lat is None or lon is None:
                _LOGGER.warning(
                    'Distance:State does not contains a location: %s',
                    item)
                return None
        else:
            # A bare value must be followed by its longitude partner.
            if not pending:
                _LOGGER.warning(
                    'Distance:Expected latitude and longitude, got %s',
                    item)
                return None

            partner = pending.pop(0)
            lat = convert(item, float)
            lon = convert(partner, float)

            if lat is None or lon is None:
                _LOGGER.warning(
                    'Distance:Unable to process latitude and '
                    'longitude: %s, %s', item, partner)
                return None

        coords.append((lat, lon))

    # One point: measure from the configured home location.
    if len(coords) == 1:
        return self._hass.config.distance(*coords[0])

    return loc_util.distance(*coords[0] + coords[1])
def distance(self, *args):
    """Calculate distance.

    Will calculate distance from home to a point or between points.
    Points can be passed in using state objects or lat/lng coordinates.
    """
    points = []
    queue = list(args)

    while queue:
        current = queue.pop(0)

        if isinstance(current, State):
            lat = current.attributes.get(ATTR_LATITUDE)
            lon = current.attributes.get(ATTR_LONGITUDE)

            if lat is None or lon is None:
                _LOGGER.warning(
                    'Distance:State does not contains a location: %s',
                    current)
                return None
        else:
            # A bare value must be followed by its longitude partner.
            if not queue:
                _LOGGER.warning(
                    'Distance:Expected latitude and longitude, got %s',
                    current)
                return None

            follower = queue.pop(0)
            lat = convert(current, float)
            lon = convert(follower, float)

            if lat is None or lon is None:
                _LOGGER.warning('Distance:Unable to process latitude and '
                                'longitude: %s, %s', current, follower)
                return None

        points.append((lat, lon))

    # One point: measure from the configured home location.
    if len(points) == 1:
        return self._hass.config.distance(*points[0])

    return loc_util.distance(*points[0] + points[1])
def __init__(self, config):
    """Initialize the scanner."""
    self.last_results = []
    self.hosts = config[CONF_HOSTS]

    # Minimum minutes between nmap scans of a host that is home.
    home_minutes = convert(config.get(CONF_HOME_INTERVAL), int, 0)
    self.home_interval = timedelta(minutes=home_minutes)

    self.success_init = self._update_info()
    _LOGGER.info("nmap scanner initialized")
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Loop Energy sensors."""
    import pyloopenergy

    elec_serial = config.get('electricity_serial')
    elec_secret = config.get('electricity_secret')
    gas_serial = config.get('gas_serial')
    gas_secret = config.get('gas_secret')
    gas_type = config.get('gas_type', 'metric')
    gas_calorific = convert(config.get('gas_calorific'), float, 39.11)

    # Electricity credentials are mandatory.
    if not elec_serial or not elec_secret:
        _LOGGER.error(
            "Configuration Error, "
            "please make sure you have configured electricity "
            "serial and secret tokens")
        return None

    # Gas credentials must be given as a complete pair or not at all.
    if bool(gas_serial) != bool(gas_secret):
        _LOGGER.error(
            "Configuration Error, "
            "please make sure you have configured gas "
            "serial and secret tokens")
        return None

    if gas_type not in ('imperial', 'metric'):
        _LOGGER.error(
            "Configuration Error, 'gas_type' "
            "can only be 'imperial' or 'metric' ")
        return None

    # pylint: disable=too-many-function-args
    controller = pyloopenergy.LoopEnergy(
        elec_serial, elec_secret, gas_serial, gas_secret, gas_type,
        gas_calorific)

    def stop_loopenergy(event):
        """Shutdown loopenergy thread on exit."""
        _LOGGER.info("Shutting down loopenergy.")
        controller.terminate()

    hass.bus.listen_once(EVENT_BLUMATE_STOP, stop_loopenergy)

    sensors = [LoopEnergyElec(controller)]
    if gas_serial:
        sensors.append(LoopEnergyGas(controller))

    add_devices(sensors)
def setup(hass, config):
    """Setup the StatsD component."""
    from statsd.compat import NUM_TYPES
    import statsd

    conf = config[DOMAIN]

    host = conf[CONF_HOST]
    port = util.convert(conf.get(CONF_PORT), int, DEFAULT_PORT)
    sample_rate = util.convert(conf.get(CONF_RATE), int, DEFAULT_RATE)
    prefix = util.convert(conf.get(CONF_PREFIX), str, DEFAULT_PREFIX)

    connection = statsd.Connection(
        host=host, port=port, sample_rate=sample_rate, disabled=False)
    meter = statsd.Gauge(prefix, connection)

    def statsd_event_listener(event):
        """Listen for new messages on the bus and sends them to StatsD."""
        new_state = event.data.get('new_state')
        if new_state is None:
            return

        # Only numeric states are exported.
        try:
            numeric = state_helper.state_as_number(new_state)
        except ValueError:
            return
        if not isinstance(numeric, NUM_TYPES):
            return

        _LOGGER.debug('Sending %s.%s', new_state.entity_id, numeric)
        meter.send(new_state.entity_id, numeric)

    hass.bus.listen(EVENT_STATE_CHANGED, statsd_event_listener)

    return True
def setup(hass, config):
    """Track the state of the sun."""
    if None in (hass.config.latitude, hass.config.longitude):
        _LOGGER.error("Latitude or longitude not set in Home Assistant config")
        return False

    latitude = util.convert(hass.config.latitude, float)
    longitude = util.convert(hass.config.longitude, float)

    errors = []

    if latitude is None:
        errors.append('Latitude needs to be a decimal value')
    elif not -90 <= latitude <= 90:
        # BUG FIX: '-90 > latitude < 90' chained to
        # '(-90 > latitude) and (latitude < 90)', which only matched
        # values below -90 -- e.g. latitude 100 was accepted.
        errors.append('Latitude needs to be -90 .. 90')

    if longitude is None:
        errors.append('Longitude needs to be a decimal value')
    elif not -180 <= longitude <= 180:
        # BUG FIX: same inverted chained comparison as latitude above.
        errors.append('Longitude needs to be -180 .. 180')

    if errors:
        _LOGGER.error('Invalid configuration received: %s', ", ".join(errors))
        return False

    platform_config = config.get(DOMAIN, {})

    # Look the elevation up when it is not configured.
    elevation = platform_config.get(CONF_ELEVATION)
    if elevation is None:
        elevation = location_util.elevation(latitude, longitude)

    from astral import Location
    location = Location(('', '', latitude, longitude,
                         hass.config.time_zone.zone, elevation))

    sun = Sun(hass, location)
    sun.point_in_time_listener(dt_util.utcnow())

    return True
def test_convert(self):
    """Test convert."""
    # Successful conversions from string input.
    self.assertEqual(5, util.convert("5", int))
    self.assertEqual(5.0, util.convert("5", float))
    self.assertEqual(True, util.convert("True", bool))
    # The default is returned for unparsable input, None, and values
    # the target type cannot accept.
    self.assertEqual(1, util.convert("NOT A NUMBER", int, 1))
    self.assertEqual(1, util.convert(None, int, 1))
    self.assertEqual(1, util.convert(object, int, 1))
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Loop Energy sensors."""
    import pyloopenergy

    elec_serial = config.get('electricity_serial')
    elec_secret = config.get('electricity_secret')
    gas_serial = config.get('gas_serial')
    gas_secret = config.get('gas_secret')
    gas_type = config.get('gas_type', 'metric')
    gas_calorific = convert(config.get('gas_calorific'), float, 39.11)

    # Electricity credentials are mandatory.
    if not elec_serial or not elec_secret:
        _LOGGER.error("Configuration Error, "
                      "please make sure you have configured electricity "
                      "serial and secret tokens")
        return None

    # Gas credentials must be given as a complete pair or not at all.
    if bool(gas_serial) != bool(gas_secret):
        _LOGGER.error("Configuration Error, "
                      "please make sure you have configured gas "
                      "serial and secret tokens")
        return None

    if gas_type not in ('imperial', 'metric'):
        _LOGGER.error("Configuration Error, 'gas_type' "
                      "can only be 'imperial' or 'metric' ")
        return None

    # pylint: disable=too-many-function-args
    controller = pyloopenergy.LoopEnergy(
        elec_serial, elec_secret, gas_serial, gas_secret, gas_type,
        gas_calorific)

    def stop_loopenergy(event):
        """Shutdown loopenergy thread on exit."""
        _LOGGER.info("Shutting down loopenergy.")
        controller.terminate()

    hass.bus.listen_once(EVENT_BLUMATE_STOP, stop_loopenergy)

    sensors = [LoopEnergyElec(controller)]
    if gas_serial:
        sensors.append(LoopEnergyGas(controller))

    add_devices(sensors)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Setup MoldIndicator sensor."""
    name = config.get('name', DEFAULT_NAME)
    indoor_temp_sensor = config.get(CONF_INDOOR_TEMP)
    outdoor_temp_sensor = config.get(CONF_OUTDOOR_TEMP)
    indoor_humidity_sensor = config.get(CONF_INDOOR_HUMIDITY)
    calib_factor = util.convert(config.get(CONF_CALIBRATION_FACTOR), float,
                                None)

    # All three source sensors are required.
    if (indoor_temp_sensor is None or outdoor_temp_sensor is None
            or indoor_humidity_sensor is None):
        _LOGGER.error('Missing required key %s, %s or %s',
                      CONF_INDOOR_TEMP, CONF_OUTDOOR_TEMP,
                      CONF_INDOOR_HUMIDITY)
        return False

    add_devices_callback([MoldIndicator(
        hass, name, indoor_temp_sensor, outdoor_temp_sensor,
        indoor_humidity_sensor, calib_factor)])
def _update_hum_sensor(state):
    """Parse humidity sensor value."""
    unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
    reading = util.convert(state.state, float)

    if reading is None:
        _LOGGER.error('Unable to parse sensor humidity: %s', state.state)
        return None

    # Only percent readings are supported; log but still return the value.
    if unit != "%":
        _LOGGER.error("Humidity sensor has unsupported unit: %s %s",
                      unit, " (allowed: %)")

    # Out-of-range readings are logged but not rejected.
    if reading > 100 or reading < 0:
        _LOGGER.error("Humidity sensor out of range: %s %s",
                      reading, " (allowed: 0-100%)")

    return reading
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Setup MoldIndicator sensor."""
    name = config.get('name', DEFAULT_NAME)
    indoor_temp = config.get(CONF_INDOOR_TEMP)
    outdoor_temp = config.get(CONF_OUTDOOR_TEMP)
    indoor_humidity = config.get(CONF_INDOOR_HUMIDITY)
    calib_factor = util.convert(config.get(CONF_CALIBRATION_FACTOR),
                                float, None)

    # All three source sensors are required.
    if indoor_temp is None or outdoor_temp is None or indoor_humidity is None:
        _LOGGER.error('Missing required key %s, %s or %s',
                      CONF_INDOOR_TEMP, CONF_OUTDOOR_TEMP,
                      CONF_INDOOR_HUMIDITY)
        return False

    add_devices_callback([MoldIndicator(
        hass, name, indoor_temp, outdoor_temp, indoor_humidity,
        calib_factor)])
def temperature_set_service(service):
    """Set temperature on the target hvacs."""
    target_hvacs = component.extract_from_service(service)

    # Parse the requested temperature from the service data.
    temperature = util.convert(
        service.data.get(ATTR_TEMPERATURE), float)

    if temperature is None:
        _LOGGER.error(
            "Received call to %s without attribute %s",
            SERVICE_SET_TEMPERATURE, ATTR_TEMPERATURE)
        return

    for hvac in target_hvacs:
        # NOTE(review): the bare `convert` below is not util.convert --
        # it appears to be a temperature-unit converter translating from
        # the system unit to the device's unit; confirm against imports.
        hvac.set_temperature(convert(
            temperature, hass.config.temperature_unit,
            hvac.unit_of_measurement))

        if hvac.should_poll:
            hvac.update_ha_state(True)
def _update_temp_sensor(state):
    """Parse temperature sensor value."""
    unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
    reading = util.convert(state.state, float)

    if reading is None:
        _LOGGER.error('Unable to parse sensor temperature: %s', state.state)
        return None

    # Normalize to Celsius; Fahrenheit is converted, anything else is
    # rejected.
    if unit == TEMP_CELSIUS:
        return reading
    if unit == TEMP_FAHRENHEIT:
        return util.temperature.fahrenheit_to_celcius(reading)

    _LOGGER.error(
        "Temp sensor has unsupported unit: %s"
        " (allowed: %s, %s)",
        unit, TEMP_CELSIUS, TEMP_FAHRENHEIT)
    return None
def _update_temp_sensor(state):
    """Parse temperature sensor value."""
    unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
    value = util.convert(state.state, float)

    if value is None:
        _LOGGER.error('Unable to parse sensor temperature: %s', state.state)
        return None

    # Normalize to Celsius; Fahrenheit is converted, anything else is
    # rejected.
    if unit == TEMP_CELSIUS:
        return value
    if unit == TEMP_FAHRENHEIT:
        return util.temperature.fahrenheit_to_celcius(value)

    _LOGGER.error("Temp sensor has unsupported unit: %s"
                  " (allowed: %s, %s)",
                  unit, TEMP_CELSIUS, TEMP_FAHRENHEIT)
    return None
def _update_temp(self, state):
    """Update thermostat with latest state from sensor."""
    unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)

    # Reject readings in unknown units before attempting to parse.
    if unit not in (TEMP_CELSIUS, TEMP_FAHRENHEIT):
        self._cur_temp = None
        self._unit = None
        _LOGGER.error('Sensor has unsupported unit: %s (allowed: %s, %s)',
                      unit, TEMP_CELSIUS, TEMP_FAHRENHEIT)
        return

    reading = util.convert(state.state, float)
    if reading is None:
        self._cur_temp = None
        self._unit = None
        _LOGGER.error('Unable to parse sensor temperature: %s',
                      state.state)
        return

    self._cur_temp = reading
    self._unit = unit
def setup_scanner_platform(hass, config, scanner, see_device):
    """Helper method to connect scanner-based platform to device tracker."""
    interval = util.convert(config.get(CONF_SCAN_INTERVAL), int,
                            DEFAULT_SCAN_INTERVAL)

    # Initial scan of each mac we also tell about host name for config
    seen = set()

    def device_tracker_scan(now):
        """Called when interval matches."""
        for mac in scanner.scan_devices():
            # Only resolve the host name the first time a mac shows up;
            # re-adding a known mac to the set is a no-op.
            host_name = None if mac in seen else scanner.get_device_name(mac)
            seen.add(mac)
            see_device(mac=mac, host_name=host_name)

    track_utc_time_change(hass, device_tracker_scan,
                          second=range(0, 60, interval))

    device_tracker_scan(None)
def setup(hass, config):
    """Set up the HTTP API and debug interface."""
    # Redact sensitive values (e.g. the API password) from this logger.
    _LOGGER.addFilter(HideSensitiveFilter(hass))

    conf = config.get(DOMAIN, {})

    api_password = util.convert(conf.get(CONF_API_PASSWORD), str)
    server_host = conf.get(CONF_SERVER_HOST, '0.0.0.0')
    server_port = conf.get(CONF_SERVER_PORT, SERVER_PORT)
    # Development mode is enabled only by the literal string "1".
    development = str(conf.get(CONF_DEVELOPMENT, "")) == "1"
    ssl_certificate = conf.get(CONF_SSL_CERTIFICATE)
    ssl_key = conf.get(CONF_SSL_KEY)
    cors_origins = conf.get(CONF_CORS_ORIGINS, [])

    server = HomeAssistantWSGI(
        hass,
        development=development,
        server_host=server_host,
        server_port=server_port,
        api_password=api_password,
        ssl_certificate=ssl_certificate,
        ssl_key=ssl_key,
        cors_origins=cors_origins
    )

    # Start the WSGI server on a daemon thread once the core is up.
    hass.bus.listen_once(
        ha.EVENT_BLUMATE_START,
        lambda event:
        threading.Thread(target=server.start, daemon=True,
                         name='WSGI-server').start())

    hass.wsgi = server
    # Advertise the machine's real IP when bound to all interfaces.
    hass.config.api = rem.API(server_host if server_host != '0.0.0.0'
                              else util.get_local_ip(),
                              api_password, server_port,
                              ssl_certificate is not None)

    return True
def start_zwave(event):
    """Startup Z-Wave."""
    NETWORK.start()

    # Need to be in STATE_AWAKED before talking to nodes.
    # Wait up to NETWORK_READY_WAIT_SECS seconds for the zwave network
    # to be ready.
    for i in range(NETWORK_READY_WAIT_SECS):
        _LOGGER.debug(
            "network state: %d %s", NETWORK.state, NETWORK.state_str)
        if NETWORK.state >= NETWORK.STATE_AWAKED:
            _LOGGER.info("zwave ready after %d seconds", i)
            break
        time.sleep(1)
    else:
        # for/else: the loop finished without breaking, i.e. the
        # network never reached STATE_AWAKED -- continue degraded.
        _LOGGER.warning(
            "zwave not ready after %d seconds, continuing anyway",
            NETWORK_READY_WAIT_SECS)
        _LOGGER.info(
            "final network state: %d %s", NETWORK.state, NETWORK.state_str)

    polling_interval = convert(config[DOMAIN].get(CONF_POLLING_INTERVAL), int)
    if polling_interval is not None:
        # NOTE(review): confirm the False argument's meaning against
        # python-openzwave's set_poll_interval signature.
        NETWORK.set_poll_interval(polling_interval, False)

    poll_interval = NETWORK.get_poll_interval()
    _LOGGER.info("zwave polling interval set to %d ms", poll_interval)

    hass.bus.listen_once(EVENT_BLUMATE_STOP, stop_zwave)

    # Register add / remove node services for Z-Wave sticks without
    # hardware inclusion button
    hass.services.register(DOMAIN, SERVICE_ADD_NODE, add_node)
    hass.services.register(DOMAIN, SERVICE_REMOVE_NODE, remove_node)
    hass.services.register(DOMAIN, SERVICE_HEAL_NETWORK, heal_network)
    hass.services.register(DOMAIN, SERVICE_SOFT_RESET, soft_reset)
    hass.services.register(DOMAIN, SERVICE_TEST_NETWORK, test_network)
def setup(hass, config):
    """Setup the Logentries component."""
    if not validate_config(config, {DOMAIN: ['token']}, _LOGGER):
        _LOGGER.error("Logentries token not present")
        return False

    conf = config[DOMAIN]
    token = util.convert(conf.get(CONF_TOKEN), str)
    # Webhook endpoint is the host prefix plus the account token.
    webhook_url = DEFAULT_HOST + token

    def logentries_event_listener(event):
        """Listen for new messages on the bus and sends them to Logentries."""
        new_state = event.data.get('new_state')
        if new_state is None:
            return

        # Prefer a numeric representation; fall back to the raw state.
        try:
            value = state_helper.state_as_number(new_state)
        except ValueError:
            value = new_state.state

        json_body = [{
            'domain': new_state.domain,
            'entity_id': new_state.object_id,
            'attributes': dict(new_state.attributes),
            'time': str(event.time_fired),
            'value': value,
        }]

        try:
            payload = {"host": webhook_url, "event": json_body}
            requests.post(webhook_url, data=json.dumps(payload), timeout=10)
        except requests.exceptions.RequestException as error:
            _LOGGER.exception('Error sending to Logentries: %s', error)

    hass.bus.listen(EVENT_STATE_CHANGED, logentries_event_listener)

    return True
def setup(hass, config):
    """Setup the InfluxDB component."""
    from influxdb import InfluxDBClient, exceptions

    # Host, username and password are mandatory.
    if not validate_config(config, {DOMAIN: ['host', CONF_USERNAME,
                                             CONF_PASSWORD]}, _LOGGER):
        return False

    conf = config[DOMAIN]

    host = conf[CONF_HOST]
    port = util.convert(conf.get(CONF_PORT), int, DEFAULT_PORT)
    database = util.convert(conf.get(CONF_DB_NAME), str, DEFAULT_DATABASE)
    username = util.convert(conf.get(CONF_USERNAME), str)
    password = util.convert(conf.get(CONF_PASSWORD), str)
    ssl = util.convert(conf.get(CONF_SSL), bool, DEFAULT_SSL)
    verify_ssl = util.convert(conf.get(CONF_VERIFY_SSL), bool,
                              DEFAULT_VERIFY_SSL)
    # Entity ids that should never be exported.
    blacklist = conf.get(CONF_BLACKLIST, [])

    try:
        influx = InfluxDBClient(host=host, port=port, username=username,
                                password=password, database=database,
                                ssl=ssl, verify_ssl=verify_ssl)
        # Probe query to verify connectivity and permissions up front.
        influx.query("select * from /.*/ LIMIT 1;")
    except exceptions.InfluxDBClientError as exc:
        _LOGGER.error("Database host is not accessible due to '%s', please "
                      "check your entries in the configuration file and that "
                      "the database exists and is READ/WRITE.", exc)
        return False

    def influx_event_listener(event):
        """Listen for new messages on the bus and sends them to Influx."""
        state = event.data.get('new_state')
        # Skip unknown/unavailable states and blacklisted entities.
        if state is None or state.state in (
                STATE_UNKNOWN, '', STATE_UNAVAILABLE) or \
                state.entity_id in blacklist:
            return

        # Prefer a numeric representation; fall back to the raw state.
        try:
            _state = state_helper.state_as_number(state)
        except ValueError:
            _state = state.state

        # Measurement name: unit of measurement, else the entity id.
        measurement = state.attributes.get('unit_of_measurement')
        if measurement in (None, ''):
            measurement = state.entity_id

        json_body = [
            {
                'measurement': measurement,
                'tags': {
                    'domain': state.domain,
                    'entity_id': state.object_id,
                },
                'time': event.time_fired,
                'fields': {
                    'value': _state,
                }
            }
        ]

        try:
            influx.write_points(json_body)
        except exceptions.InfluxDBClientError:
            _LOGGER.exception('Error saving event "%s" to InfluxDB',
                              json_body)

    hass.bus.listen(EVENT_STATE_CHANGED, influx_event_listener)

    return True
def __init__(self, bmss, config):
    """Setup the MongoDB component.

    Reads connection settings from the component config, installs a
    pymongo command listener for logging, and connects eagerly; the
    constructor fails if the connection cannot be established.
    """
    self.__state = STATE_UNKNOWN
    self.bmss = bmss
    self.__config = config[DOMAIN]
    self.__host = util.convert(self.__config.get(CONF_HOST), str,
                               DEFAULT_HOST)
    self.__port = util.convert(self.__config.get(CONF_PORT), int,
                               DEFAULT_PORT)
    self.__tz_aware = util.convert(self.__config.get(CONF_TZ_AWARE), bool,
                                   DEFAULT_TZ_AWARE)
    self.__socket_timeout_ms = util.convert(
        self.__config.get(CONF_SOCKET_TIMEOUT_MS), int,
        DEFAULT_SOCKET_TIMEOUT_MS)
    self.__ssl = util.convert(self.__config.get(CONF_SSL), bool,
                              DEFAULT_SSL)
    self.__max_pool_size = util.convert(
        self.__config.get(CONF_MAX_POOL_SIZE), int, DEFAULT_MAX_POOL_SIZE)
    self.__socket_keep_alive = util.convert(
        self.__config.get(CONF_SOCKET_KEEP_ALIVE), int,
        DEFAULT_SOCKET_KEEP_ALIVE)

    from pymongo import MongoClient
    from pymongo.monitoring import CommandListener

    class MongoCommandEvent(CommandListener):
        """Log pymongo command lifecycle events.

        https://api.mongodb.com/python/current/api/pymongo/monitoring.html#module-pymongo.monitoring
        """

        def started(self, event):
            _LOGGER.debug("Command {0.command_name} with request id "
                          "{0.request_id} started on server "
                          "{0.connection_id}".format(event))

        def succeeded(self, event):
            _LOGGER.info("Command {0.command_name} with request id "
                         "{0.request_id} on server {0.connection_id} "
                         "succeeded in {0.duration_micros} "
                         "microseconds".format(event))

        def failed(self, event):
            # BUG FIX: Logger.warn() is a deprecated alias of warning().
            _LOGGER.warning("Command {0.command_name} with request id "
                            "{0.request_id} on server {0.connection_id} "
                            "failed in {0.duration_micros} "
                            "microseconds".format(event))

    self.__client = MongoClient(host=self.__host,
                                port=self.__port,
                                tz_aware=self.__tz_aware,
                                maxPoolSize=self.__max_pool_size,
                                socketTimeoutMS=self.__socket_timeout_ms,
                                ssl=self.__ssl,
                                socketKeepAlive=self.__socket_keep_alive,
                                document_class=dict,
                                connect=True,
                                event_listeners=[MongoCommandEvent()])

    # Will fail here if connection is not able to be established
    assert self.__client is not None

    self.__state = STATE_IDLE
# Tear down the connection when the core stops, and start database
# discovery once the core has started.
bmss.bus.listen_once(EVENT_BLUMATE_STOP, self.disconnect)
bmss.bus.listen_once(EVENT_BLUMATE_START, self.discover_databases)

# Expose the MongoDB helpers as callable services.
bmss.services.register(DOMAIN, SERVICE_DISCOVER_DATABASES,
                       self.discover_databases)
bmss.services.register(DOMAIN, SERVICE_UNLOCK, self.unlock)
bmss.services.register(DOMAIN, SERVICE_DISCONNECT, self.disconnect)
def setup(hass, config):
    """Setup the InfluxDB component."""
    from influxdb import InfluxDBClient, exceptions

    # Host, username and password are mandatory.
    if not validate_config(
            config, {DOMAIN: ['host', CONF_USERNAME, CONF_PASSWORD]},
            _LOGGER):
        return False

    conf = config[DOMAIN]

    host = conf[CONF_HOST]
    port = util.convert(conf.get(CONF_PORT), int, DEFAULT_PORT)
    database = util.convert(conf.get(CONF_DB_NAME), str, DEFAULT_DATABASE)
    username = util.convert(conf.get(CONF_USERNAME), str)
    password = util.convert(conf.get(CONF_PASSWORD), str)
    ssl = util.convert(conf.get(CONF_SSL), bool, DEFAULT_SSL)
    verify_ssl = util.convert(conf.get(CONF_VERIFY_SSL), bool,
                              DEFAULT_VERIFY_SSL)
    # Entity ids that should never be exported.
    blacklist = conf.get(CONF_BLACKLIST, [])

    try:
        influx = InfluxDBClient(host=host, port=port, username=username,
                                password=password, database=database,
                                ssl=ssl, verify_ssl=verify_ssl)
        # Probe query to verify connectivity and permissions up front.
        influx.query("select * from /.*/ LIMIT 1;")
    except exceptions.InfluxDBClientError as exc:
        _LOGGER.error(
            "Database host is not accessible due to '%s', please "
            "check your entries in the configuration file and that "
            "the database exists and is READ/WRITE.", exc)
        return False

    def influx_event_listener(event):
        """Listen for new messages on the bus and sends them to Influx."""
        state = event.data.get('new_state')
        # Skip unknown/unavailable states and blacklisted entities.
        if state is None or state.state in (
                STATE_UNKNOWN, '', STATE_UNAVAILABLE) or \
                state.entity_id in blacklist:
            return

        # Prefer a numeric representation; fall back to the raw state.
        try:
            _state = state_helper.state_as_number(state)
        except ValueError:
            _state = state.state

        # Measurement name: unit of measurement, else the entity id.
        measurement = state.attributes.get('unit_of_measurement')
        if measurement in (None, ''):
            measurement = state.entity_id

        json_body = [{
            'measurement': measurement,
            'tags': {
                'domain': state.domain,
                'entity_id': state.object_id,
            },
            'time': event.time_fired,
            'fields': {
                'value': _state,
            }
        }]

        try:
            influx.write_points(json_body)
        except exceptions.InfluxDBClientError:
            _LOGGER.exception('Error saving event "%s" to InfluxDB',
                              json_body)

    hass.bus.listen(EVENT_STATE_CHANGED, influx_event_listener)

    return True
def __init__(self, bmss, config):
    """Setup the MongoDB component.

    Reads the connection settings from ``config[DOMAIN]`` (falling back
    to module defaults), eagerly connects to the MongoDB server
    (``connect=True``) with command monitoring enabled, and moves the
    component state from STATE_UNKNOWN to STATE_IDLE on success.
    """
    self.__state = STATE_UNKNOWN
    self.bmss = bmss
    self.__config = config[DOMAIN]

    # Connection settings, each falling back to the module default.
    self.__host = util.convert(self.__config.get(CONF_HOST),
                               str, DEFAULT_HOST)
    self.__port = util.convert(self.__config.get(CONF_PORT),
                               int, DEFAULT_PORT)
    self.__tz_aware = util.convert(self.__config.get(CONF_TZ_AWARE),
                                   bool, DEFAULT_TZ_AWARE)
    self.__socket_timeout_ms = util.convert(
        self.__config.get(CONF_SOCKET_TIMEOUT_MS),
        int, DEFAULT_SOCKET_TIMEOUT_MS)
    self.__ssl = util.convert(self.__config.get(CONF_SSL),
                              bool, DEFAULT_SSL)
    self.__max_pool_size = util.convert(
        self.__config.get(CONF_MAX_POOL_SIZE),
        int, DEFAULT_MAX_POOL_SIZE)
    self.__socket_keep_alive = util.convert(
        self.__config.get(CONF_SOCKET_KEEP_ALIVE),
        int, DEFAULT_SOCKET_KEEP_ALIVE)

    # Imported lazily so the third-party dependency is only required
    # when this component is actually configured.
    from pymongo import MongoClient
    from pymongo.monitoring import CommandListener

    class MongoCommandEvent(CommandListener):
        """Log every MongoDB command's lifecycle.

        https://api.mongodb.com/python/current/api/pymongo/monitoring.html#module-pymongo.monitoring
        """

        def started(self, event):
            """Log a command that has been sent to the server."""
            _LOGGER.debug("Command {0.command_name} with request id "
                          "{0.request_id} started on server "
                          "{0.connection_id}".format(event))

        def succeeded(self, event):
            """Log a command the server completed successfully."""
            _LOGGER.info("Command {0.command_name} with request id "
                         "{0.request_id} on server {0.connection_id} "
                         "succeeded in {0.duration_micros} "
                         "microseconds".format(event))

        def failed(self, event):
            """Log a command the server rejected or that errored."""
            # FIX: Logger.warn() is a deprecated alias; use warning().
            _LOGGER.warning("Command {0.command_name} with request id "
                            "{0.request_id} on server {0.connection_id} "
                            "failed in {0.duration_micros} "
                            "microseconds".format(event))

    # connect=True makes the constructor establish the connection
    # immediately, so configuration errors surface here.
    self.__client = MongoClient(host=self.__host,
                                port=self.__port,
                                tz_aware=self.__tz_aware,
                                maxPoolSize=self.__max_pool_size,
                                socketTimeoutMS=self.__socket_timeout_ms,
                                ssl=self.__ssl,
                                socketKeepAlive=self.__socket_keep_alive,
                                document_class=dict,
                                connect=True,
                                event_listeners=[MongoCommandEvent()])

    # MongoClient raises on connection failure; it never returns None,
    # so this is a sanity check only.
    assert self.__client is not None

    self.__state = STATE_IDLE
# NOTE(review): fragment of a method body (uses both `bmss` and `self`);
# the enclosing `def` is outside this view.  Registers lifecycle hooks and
# component services: disconnect on shutdown, database discovery once
# startup completes, and discover/unlock/disconnect as callable services.
bmss.bus.listen_once(EVENT_BLUMATE_STOP, self.disconnect)
bmss.bus.listen_once(EVENT_BLUMATE_START, self.discover_databases)

bmss.services.register(DOMAIN, SERVICE_DISCOVER_DATABASES,
                       self.discover_databases)
bmss.services.register(DOMAIN, SERVICE_UNLOCK, self.unlock)
bmss.services.register(DOMAIN, SERVICE_DISCONNECT, self.disconnect)
def closest(self, *args):
    """Find closest entity.

    Closest to home:
        closest(states)
        closest(states.device_tracker)
        closest('group.children')
        closest(states.group.children)

    Closest to a point:
        closest(23.456, 23.456, 'group.children')
        closest('zone.school', 'group.children')
        closest(states.zone.school, 'group.children')
    """
    # Dispatch on arity to resolve the reference point and the
    # collection of candidate entities.
    if len(args) == 1:
        # closest(entities): measure from the home coordinates.
        lat = self._hass.config.latitude
        lon = self._hass.config.longitude
        candidates = args[0]
    elif len(args) == 2:
        # closest(point_entity, entities): measure from another entity.
        point_state = self._resolve_state(args[0])

        if point_state is None:
            _LOGGER.warning('Closest:Unable to find state %s', args[0])
            return None
        elif not loc_helper.has_location(point_state):
            _LOGGER.warning(
                'Closest:State does not contain valid location: %s',
                point_state)
            return None

        lat = point_state.attributes.get(ATTR_LATITUDE)
        lon = point_state.attributes.get(ATTR_LONGITUDE)
        candidates = args[1]
    else:
        # closest(lat, lon, entities): measure from raw coordinates.
        lat = convert(args[0], float)
        lon = convert(args[1], float)

        if lat is None or lon is None:
            _LOGGER.warning('Closest:Received invalid coordinates: %s, %s',
                            args[0], args[1])
            return None

        candidates = args[2]

    # Expand the candidates into a flat list of state objects.
    if isinstance(candidates, (AllStates, DomainStates)):
        found = list(candidates)
    else:
        entity_id = (candidates.entity_id if isinstance(candidates, State)
                     else str(candidates))
        found = [self._hass.states.get(eid) for eid
                 in group.expand_entity_ids(self._hass, [entity_id])]

    return loc_helper.closest(lat, lon, found)
def setup(hass, config):
    """Setup device tracker.

    Loads known devices from the YAML registry, sets up every
    configured tracker platform, listens for discovered platforms,
    schedules periodic stale-device cleanup, and registers the
    `see` service.  Always returns True.
    """
    yaml_path = hass.config.path(YAML_DEVICES)
    conf = config.get(DOMAIN, {})
    # The domain config may be a list of platform entries; global
    # options are read from the first entry only.
    if isinstance(conf, list) and len(conf) > 0:
        conf = conf[0]
    consider_home = timedelta(seconds=util.convert(
        conf.get(CONF_CONSIDER_HOME), int, DEFAULT_CONSIDER_HOME))
    track_new = util.convert(conf.get(CONF_TRACK_NEW), bool,
                             DEFAULT_CONF_TRACK_NEW)
    home_range = util.convert(conf.get(CONF_HOME_RANGE), int,
                              DEFAULT_HOME_RANGE)

    devices = load_config(yaml_path, hass, consider_home, home_range)
    tracker = DeviceTracker(hass, consider_home, track_new,
                            home_range, devices)

    def setup_platform(p_type, p_config, disc_info=None):
        """Setup a device tracker platform."""
        platform = prepare_setup_platform(hass, config, DOMAIN, p_type)
        if platform is None:
            return

        try:
            # Scanner platforms are polled for devices; legacy platforms
            # push updates through tracker.see themselves.
            if hasattr(platform, 'get_scanner'):
                scanner = platform.get_scanner(hass, {DOMAIN: p_config})

                if scanner is None:
                    _LOGGER.error('Error setting up platform %s', p_type)
                    return

                setup_scanner_platform(hass, p_config, scanner,
                                       tracker.see)
                return

            if not platform.setup_scanner(hass, p_config, tracker.see):
                _LOGGER.error('Error setting up platform %s', p_type)
        except Exception:  # pylint: disable=broad-except
            # A failing platform must not break the other trackers.
            _LOGGER.exception('Error setting up platform %s', p_type)

    for p_type, p_config in config_per_platform(config, DOMAIN):
        setup_platform(p_type, p_config)

    def device_tracker_discovered(service, info):
        """Called when a device tracker platform is discovered."""
        setup_platform(DISCOVERY_PLATFORMS[service], {}, info)

    discovery.listen(hass, DISCOVERY_PLATFORMS.keys(),
                     device_tracker_discovered)

    def update_stale(now):
        """Clean up stale devices."""
        tracker.update_stale(now)

    # Run the stale check every 5 seconds (seconds 0, 5, ..., 55).
    track_utc_time_change(hass, update_stale, second=range(0, 60, 5))

    tracker.setup_group()

    def see_service(call):
        """Service to see a device."""
        # Forward only the recognized keyword arguments to tracker.see.
        args = {
            key: value for key, value in call.data.items() if key in
            (ATTR_MAC, ATTR_DEV_ID, ATTR_HOST_NAME, ATTR_LOCATION_NAME,
             ATTR_GPS, ATTR_GPS_ACCURACY, ATTR_BATTERY)
        }
        tracker.see(**args)

    descriptions = load_yaml_config_file(
        os.path.join(os.path.dirname(__file__), 'services.yaml'))
    hass.services.register(DOMAIN, SERVICE_SEE, see_service,
                           descriptions.get(SERVICE_SEE))

    return True
def setup(hass, config):
    """Setup device tracker.

    Loads known devices, initializes every configured tracker
    platform, wires platform discovery, schedules stale-device
    cleanup and registers the `see` service.  Always returns True.
    """
    yaml_path = hass.config.path(YAML_DEVICES)

    options = config.get(DOMAIN, {})
    # A list-style domain config supplies global options via its
    # first entry.
    if isinstance(options, list) and len(options) > 0:
        options = options[0]

    consider_home = timedelta(seconds=util.convert(
        options.get(CONF_CONSIDER_HOME), int, DEFAULT_CONSIDER_HOME))
    track_new = util.convert(options.get(CONF_TRACK_NEW), bool,
                             DEFAULT_CONF_TRACK_NEW)
    home_range = util.convert(options.get(CONF_HOME_RANGE), int,
                              DEFAULT_HOME_RANGE)

    devices = load_config(yaml_path, hass, consider_home, home_range)
    tracker = DeviceTracker(hass, consider_home, track_new,
                            home_range, devices)

    def setup_platform(p_type, p_config, disc_info=None):
        """Setup a device tracker platform."""
        platform = prepare_setup_platform(hass, config, DOMAIN, p_type)
        if platform is None:
            return

        try:
            if not hasattr(platform, 'get_scanner'):
                # Legacy platform: pushes updates via tracker.see.
                if not platform.setup_scanner(hass, p_config, tracker.see):
                    _LOGGER.error('Error setting up platform %s', p_type)
                return

            # Scanner platform: we poll it for seen devices.
            scanner = platform.get_scanner(hass, {DOMAIN: p_config})
            if scanner is None:
                _LOGGER.error('Error setting up platform %s', p_type)
                return
            setup_scanner_platform(hass, p_config, scanner, tracker.see)
        except Exception:  # pylint: disable=broad-except
            # One broken platform must not take down the rest.
            _LOGGER.exception('Error setting up platform %s', p_type)

    for platform_type, platform_config in config_per_platform(config,
                                                              DOMAIN):
        setup_platform(platform_type, platform_config)

    def device_tracker_discovered(service, info):
        """Called when a device tracker platform is discovered."""
        setup_platform(DISCOVERY_PLATFORMS[service], {}, info)

    discovery.listen(hass, DISCOVERY_PLATFORMS.keys(),
                     device_tracker_discovered)

    def update_stale(now):
        """Clean up stale devices."""
        tracker.update_stale(now)

    # Every 5 seconds: seconds 0, 5, ..., 55 of each minute.
    track_utc_time_change(hass, update_stale, second=range(0, 60, 5))

    tracker.setup_group()

    accepted_keys = (ATTR_MAC, ATTR_DEV_ID, ATTR_HOST_NAME,
                     ATTR_LOCATION_NAME, ATTR_GPS, ATTR_GPS_ACCURACY,
                     ATTR_BATTERY)

    def see_service(call):
        """Service to see a device."""
        tracker.see(**{key: value for key, value in call.data.items()
                       if key in accepted_keys})

    descriptions = load_yaml_config_file(
        os.path.join(os.path.dirname(__file__), 'services.yaml'))
    hass.services.register(DOMAIN, SERVICE_SEE, see_service,
                           descriptions.get(SERVICE_SEE))

    return True
def setup_scanner(hass, config, see):
    """Setup the Bluetooth Scanner.

    Loads known devices from the YAML registry, optionally discovers
    new bluetooth devices on startup, and schedules a periodic name
    lookup for every tracked device.  Returns True on success, False
    when there is nothing to track.
    """
    # pylint: disable=import-error
    import bluetooth

    def see_device(device):
        """Mark a device as seen."""
        # `device` is an (address, name) pair as returned by pybluez.
        see(mac=BT_PREFIX + device[0], host_name=device[1])

    def discover_devices():
        """Discover bluetooth devices."""
        result = bluetooth.discover_devices(duration=8, lookup_names=True,
                                            flush_cache=True,
                                            lookup_class=False)
        # FIX: lazy %-style args instead of eager string concatenation,
        # so the message is only built when debug logging is enabled.
        _LOGGER.debug("Bluetooth devices discovered = %d", len(result))
        return result

    yaml_path = hass.config.path(YAML_DEVICES)
    devs_to_track = []
    devs_donot_track = []

    # Load all known devices.
    # We just need the devices, so set consider_home and home range
    # to 0.
    for device in load_config(yaml_path, hass, 0, 0):
        # Only devices whose MAC carries the bluetooth prefix belong
        # to this scanner.
        if device.mac and device.mac[:3].upper() == BT_PREFIX:
            if device.track:
                devs_to_track.append(device.mac[3:])
            else:
                devs_donot_track.append(device.mac[3:])

    # When enabled, discover new devices on startup.  The default is
    # to track new devices only if nothing is tracked yet.
    track_new = util.convert(config.get(CONF_TRACK_NEW), bool,
                             not devs_to_track)
    if track_new:
        for dev in discover_devices():
            if dev[0] not in devs_to_track and \
               dev[0] not in devs_donot_track:
                devs_to_track.append(dev[0])
                see_device(dev)

    if not devs_to_track:
        _LOGGER.warning("No bluetooth devices to track!")
        return False

    interval = util.convert(config.get(CONF_SCAN_INTERVAL), int,
                            DEFAULT_SCAN_INTERVAL)

    def update_bluetooth(now):
        """Lookup bluetooth device and update status."""
        try:
            for mac in devs_to_track:
                _LOGGER.debug("Scanning %s", mac)
                result = bluetooth.lookup_name(mac, timeout=5)
                if not result:
                    # Could not look up the device name; not seen.
                    continue
                see_device((mac, result))
        except bluetooth.BluetoothError:
            _LOGGER.exception('Error looking up bluetooth device!')
        # Always reschedule the next scan, even after lookup errors.
        track_point_in_utc_time(hass, update_bluetooth,
                                now + timedelta(seconds=interval))

    update_bluetooth(dt_util.utcnow())
    return True