async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Logbook setup."""
    hass.data[DOMAIN] = {}

    @callback
    def log_message(service: ServiceCall) -> None:
        """Handle sending notification message service calls."""
        message = service.data[ATTR_MESSAGE]
        name = service.data[ATTR_NAME]
        domain = service.data.get(ATTR_DOMAIN)
        entity_id = service.data.get(ATTR_ENTITY_ID)

        if entity_id is None and domain is None:
            # If there is no entity_id or domain, the event will get
            # filtered away, so we use the "logbook" domain.
            domain = DOMAIN

        message.hass = hass
        message = message.async_render(parse_result=False)

        async_log_entry(hass, name, message, domain, entity_id)

    frontend.async_register_built_in_panel(
        hass, "logbook", "logbook", "hass:format-list-bulleted-type"
    )

    if conf := config.get(DOMAIN, {}):
        filters = sqlalchemy_filter_from_include_exclude_conf(conf)
        entities_filter = convert_include_exclude_filter(conf)
    else:
        filters = None
        entities_filter = None

async def _logbook_filtering(hass, last_changed, last_updated):
    # pylint: disable=import-outside-toplevel
    from homeassistant.components import logbook

    entity_id = "test.entity"

    old_state = {"entity_id": entity_id, "state": "off"}

    new_state = {
        "entity_id": entity_id,
        "state": "on",
        "last_updated": last_updated,
        "last_changed": last_changed,
    }

    event = _create_state_changed_event_from_old_new(
        entity_id, dt_util.utcnow(), old_state, new_state
    )

    entity_attr_cache = logbook.EntityAttributeCache(hass)
    entities_filter = convert_include_exclude_filter(
        logbook.INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA({})
    )

    def yield_events(event):
        for _ in range(10**5):
            # pylint: disable=protected-access
            if logbook._keep_event(hass, event, entities_filter):
                yield event

    start = timer()
    list(logbook.humanify(hass, yield_events(event), entity_attr_cache, {}))
    return timer() - start

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the recorder."""
    conf = config[DOMAIN]
    entity_filter = convert_include_exclude_filter(conf)
    auto_purge = conf[CONF_AUTO_PURGE]
    keep_days = conf[CONF_PURGE_KEEP_DAYS]
    commit_interval = conf[CONF_COMMIT_INTERVAL]
    db_max_retries = conf[CONF_DB_MAX_RETRIES]
    db_retry_wait = conf[CONF_DB_RETRY_WAIT]
    db_integrity_check = conf[CONF_DB_INTEGRITY_CHECK]

    db_url = conf.get(CONF_DB_URL)
    if not db_url:
        db_url = DEFAULT_URL.format(hass_config_path=hass.config.path(DEFAULT_DB_FILE))

    exclude = conf[CONF_EXCLUDE]
    exclude_t = exclude.get(CONF_EVENT_TYPES, [])

    instance = hass.data[DATA_INSTANCE] = Recorder(
        hass=hass,
        auto_purge=auto_purge,
        keep_days=keep_days,
        commit_interval=commit_interval,
        uri=db_url,
        db_max_retries=db_max_retries,
        db_retry_wait=db_retry_wait,
        entity_filter=entity_filter,
        exclude_t=exclude_t,
        db_integrity_check=db_integrity_check,
    )
    instance.async_initialize()
    instance.start()

    async def async_handle_purge_service(service):
        """Handle calls to the purge service."""
        instance.do_adhoc_purge(**service.data)

    hass.services.async_register(
        DOMAIN, SERVICE_PURGE, async_handle_purge_service, schema=SERVICE_PURGE_SCHEMA
    )

    async def async_handle_enable_service(service):
        """Handle calls to the enable service."""
        instance.set_enable(True)

    hass.services.async_register(
        DOMAIN, SERVICE_ENABLE, async_handle_enable_service, schema=SERVICE_ENABLE_SCHEMA
    )

    async def async_handle_disable_service(service):
        """Handle calls to the disable service."""
        instance.set_enable(False)

    hass.services.async_register(
        DOMAIN,
        SERVICE_DISABLE,
        async_handle_disable_service,
        schema=SERVICE_DISABLE_SCHEMA,
    )

    return await instance.async_db_ready

async def test_included_and_excluded_complex_case(hass, recorder_mock):
    """Test filters with included and excluded with a complex filter."""
    filter_accept = {"light.any", "sensor.kitchen_4", "switch.kitchen"}
    filter_reject = {
        "camera.one",
        "notify.any",
        "automation.update_readme",
        "automation.update_utilities_cost",
        "binary_sensor.iss",
    }
    conf = {
        CONF_INCLUDE: {
            CONF_ENTITIES: ["group.trackers"],
        },
        CONF_EXCLUDE: {
            CONF_ENTITIES: [
                "automation.update_readme",
                "automation.update_utilities_cost",
                "binary_sensor.iss",
            ],
            CONF_DOMAINS: [
                "camera",
                "group",
                "media_player",
                "notify",
                "scene",
                "sun",
                "zone",
            ],
        },
    }

    extracted_filter = extract_include_exclude_filter_conf(conf)
    entity_filter = convert_include_exclude_filter(extracted_filter)
    sqlalchemy_filter = sqlalchemy_filter_from_include_exclude_conf(extracted_filter)
    assert sqlalchemy_filter is not None

    for entity_id in filter_accept:
        assert entity_filter(entity_id) is True

    for entity_id in filter_reject:
        assert entity_filter(entity_id) is False

    (
        filtered_states_entity_ids,
        filtered_events_entity_ids,
    ) = await _async_get_states_and_events_with_filter(
        hass, sqlalchemy_filter, filter_accept | filter_reject
    )

    assert filtered_states_entity_ids == filter_accept
    assert not filtered_states_entity_ids.intersection(filter_reject)

    assert filtered_events_entity_ids == filter_accept
    assert not filtered_events_entity_ids.intersection(filter_reject)

async def test_included_and_excluded_simple_case_no_domains(hass, recorder_mock):
    """Test filters with included and excluded without domains."""
    filter_accept = {"sensor.kitchen4", "switch.kitchen"}
    filter_reject = {
        "light.any",
        "switch.other",
        "cover.any",
        "sensor.weather5",
        "light.kitchen",
    }
    conf = {
        CONF_INCLUDE: {
            CONF_ENTITY_GLOBS: ["sensor.kitchen*"],
            CONF_ENTITIES: ["switch.kitchen"],
        },
        CONF_EXCLUDE: {
            CONF_ENTITY_GLOBS: ["sensor.weather*"],
            CONF_ENTITIES: ["light.kitchen"],
        },
    }

    extracted_filter = extract_include_exclude_filter_conf(conf)
    entity_filter = convert_include_exclude_filter(extracted_filter)
    sqlalchemy_filter = sqlalchemy_filter_from_include_exclude_conf(extracted_filter)
    assert sqlalchemy_filter is not None

    for entity_id in filter_accept:
        assert entity_filter(entity_id) is True

    for entity_id in filter_reject:
        assert entity_filter(entity_id) is False

    assert not entity_filter.explicitly_included("light.any")
    assert not entity_filter.explicitly_included("switch.other")
    assert entity_filter.explicitly_included("sensor.kitchen4")
    assert entity_filter.explicitly_included("switch.kitchen")

    assert not entity_filter.explicitly_excluded("light.any")
    assert not entity_filter.explicitly_excluded("switch.other")
    assert entity_filter.explicitly_excluded("sensor.weather5")
    assert entity_filter.explicitly_excluded("light.kitchen")

    (
        filtered_states_entity_ids,
        filtered_events_entity_ids,
    ) = await _async_get_states_and_events_with_filter(
        hass, sqlalchemy_filter, filter_accept | filter_reject
    )

    assert filtered_states_entity_ids == filter_accept
    assert not filtered_states_entity_ids.intersection(filter_reject)

    assert filtered_events_entity_ids == filter_accept
    assert not filtered_events_entity_ids.intersection(filter_reject)

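# A minimal standalone sketch (not part of the test suite above) of the same
# include/exclude semantics the tests exercise: an explicitly included entity
# always passes, and an exclude glob beats an included domain. It assumes the
# helpers live in homeassistant.helpers.entityfilter; the entity ids are
# illustrative only.
from homeassistant.helpers.entityfilter import (
    INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
    convert_include_exclude_filter,
)


def _sketch_include_exclude_semantics():
    conf = INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA(
        {
            "include": {"domains": ["sensor"], "entities": ["switch.kitchen"]},
            "exclude": {"entity_globs": ["sensor.weather*"]},
        }
    )
    entity_filter = convert_include_exclude_filter(conf)

    assert entity_filter("sensor.kitchen") is True  # included via domain
    assert entity_filter("switch.kitchen") is True  # explicitly included
    assert entity_filter("sensor.weather5") is False  # exclude glob wins over domain
    assert entity_filter("light.any") is False  # not included anywhere
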
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Logbook setup."""
    hass.data[DOMAIN] = {}

    @callback
    def log_message(service: ServiceCall) -> None:
        """Handle sending notification message service calls."""
        message = service.data[ATTR_MESSAGE]
        name = service.data[ATTR_NAME]
        domain = service.data.get(ATTR_DOMAIN)
        entity_id = service.data.get(ATTR_ENTITY_ID)

        if entity_id is None and domain is None:
            # If there is no entity_id or domain, the event will get
            # filtered away, so we use the "logbook" domain.
            domain = DOMAIN

        message.hass = hass
        message = message.async_render(parse_result=False)

        async_log_entry(hass, name, message, domain, entity_id, service.context)

    frontend.async_register_built_in_panel(
        hass, "logbook", "logbook", "hass:format-list-bulleted-type"
    )

    recorder_conf = config.get(RECORDER_DOMAIN, {})
    logbook_conf = config.get(DOMAIN, {})
    recorder_filter = extract_include_exclude_filter_conf(recorder_conf)
    logbook_filter = extract_include_exclude_filter_conf(logbook_conf)
    merged_filter = merge_include_exclude_filters(recorder_filter, logbook_filter)

    possible_merged_entities_filter = convert_include_exclude_filter(merged_filter)
    if not possible_merged_entities_filter.empty_filter:
        filters = sqlalchemy_filter_from_include_exclude_conf(merged_filter)
        entities_filter = possible_merged_entities_filter
    else:
        filters = None
        entities_filter = None

    hass.data[LOGBOOK_FILTERS] = filters
    hass.data[LOGBOOK_ENTITIES_FILTER] = entities_filter

    websocket_api.async_setup(hass)
    rest_api.async_setup(hass, config, filters, entities_filter)
    hass.services.async_register(DOMAIN, "log", log_message, schema=LOG_MESSAGE_SCHEMA)

    await async_process_integration_platforms(hass, DOMAIN, _process_logbook_platform)

    return True

async def test_same_entity_included_excluded_include_domain_wins(hass, recorder_mock):
    """Test filters with domain and entities and the include domain wins."""
    filter_accept = {
        "media_player.test2",
        "media_player.test3",
        "thermostat.test",
    }
    filter_reject = {
        "thermostat.test2",
        "zone.home",
        "script.can_cancel_this_one",
    }
    conf = {
        CONF_INCLUDE: {
            CONF_DOMAINS: ["media_player"],
            CONF_ENTITIES: ["thermostat.test"],
        },
        CONF_EXCLUDE: {
            CONF_DOMAINS: ["thermostat"],
            CONF_ENTITIES: ["media_player.test"],
        },
    }

    extracted_filter = extract_include_exclude_filter_conf(conf)
    entity_filter = convert_include_exclude_filter(extracted_filter)
    sqlalchemy_filter = sqlalchemy_filter_from_include_exclude_conf(extracted_filter)
    assert sqlalchemy_filter is not None

    for entity_id in filter_accept:
        assert entity_filter(entity_id) is True

    for entity_id in filter_reject:
        assert entity_filter(entity_id) is False

    (
        filtered_states_entity_ids,
        filtered_events_entity_ids,
    ) = await _async_get_states_and_events_with_filter(
        hass, sqlalchemy_filter, filter_accept | filter_reject
    )

    assert filtered_states_entity_ids == filter_accept
    assert not filtered_states_entity_ids.intersection(filter_reject)

    assert filtered_events_entity_ids == filter_accept
    assert not filtered_events_entity_ids.intersection(filter_reject)

async def async_setup(hass, config):
    """Set up the MQTT state feed."""
    conf = config.get(DOMAIN)
    publish_filter = convert_include_exclude_filter(conf)
    base_topic = conf.get(CONF_BASE_TOPIC)
    publish_attributes = conf.get(CONF_PUBLISH_ATTRIBUTES)
    publish_timestamps = conf.get(CONF_PUBLISH_TIMESTAMPS)
    if not base_topic.endswith("/"):
        base_topic = f"{base_topic}/"

    async def _state_publisher(entity_id, old_state, new_state):
        if new_state is None:
            return

        if not publish_filter(entity_id):
            return

        payload = new_state.state

        mybase = f"{base_topic}{entity_id.replace('.', '/')}/"
        await mqtt.async_publish(hass, f"{mybase}state", payload, 1, True)

        if publish_timestamps:
            if new_state.last_updated:
                await mqtt.async_publish(
                    hass,
                    f"{mybase}last_updated",
                    new_state.last_updated.isoformat(),
                    1,
                    True,
                )
            if new_state.last_changed:
                await mqtt.async_publish(
                    hass,
                    f"{mybase}last_changed",
                    new_state.last_changed.isoformat(),
                    1,
                    True,
                )

        if publish_attributes:
            for key, val in new_state.attributes.items():
                encoded_val = json.dumps(val, cls=JSONEncoder)
                await mqtt.async_publish(hass, mybase + key, encoded_val, 1, True)

    async_track_state_change(hass, MATCH_ALL, _state_publisher)
    return True

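# Illustrative topic layout produced by _state_publisher above, assuming the
# base_topic "homeassistant/statestream" and the entity "light.kitchen"
# (the values shown are examples, not captured output):
#
#   homeassistant/statestream/light/kitchen/state         -> "on"
#   homeassistant/statestream/light/kitchen/last_updated  -> "2021-06-01T12:00:00+00:00"
#   homeassistant/statestream/light/kitchen/last_changed  -> "2021-06-01T12:00:00+00:00"
#   homeassistant/statestream/light/kitchen/<attribute>   -> JSON-encoded attribute value
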
async def test_specificly_included_entity_always_wins(hass, recorder_mock):
    """Test that a specifically included entity always wins."""
    filter_accept = {
        "media_player.test2",
        "media_player.test3",
        "thermostat.test",
        "binary_sensor.specific_include",
    }
    filter_reject = {
        "binary_sensor.test2",
        "binary_sensor.home",
        "binary_sensor.can_cancel_this_one",
    }
    conf = {
        CONF_INCLUDE: {
            CONF_ENTITIES: ["binary_sensor.specific_include"],
        },
        CONF_EXCLUDE: {
            CONF_DOMAINS: ["binary_sensor"],
            CONF_ENTITY_GLOBS: ["binary_sensor.*"],
        },
    }

    extracted_filter = extract_include_exclude_filter_conf(conf)
    entity_filter = convert_include_exclude_filter(extracted_filter)
    sqlalchemy_filter = sqlalchemy_filter_from_include_exclude_conf(extracted_filter)
    assert sqlalchemy_filter is not None

    for entity_id in filter_accept:
        assert entity_filter(entity_id) is True

    for entity_id in filter_reject:
        assert entity_filter(entity_id) is False

    (
        filtered_states_entity_ids,
        filtered_events_entity_ids,
    ) = await _async_get_states_and_events_with_filter(
        hass, sqlalchemy_filter, filter_accept | filter_reject
    )

    assert filtered_states_entity_ids == filter_accept
    assert not filtered_states_entity_ids.intersection(filter_reject)

    assert filtered_events_entity_ids == filter_accept
    assert not filtered_events_entity_ids.intersection(filter_reject)

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the recorder."""
    hass.data[DOMAIN] = {}
    exclude_attributes_by_domain: dict[str, set[str]] = {}
    hass.data[EXCLUDE_ATTRIBUTES] = exclude_attributes_by_domain
    conf = config[DOMAIN]
    entity_filter = convert_include_exclude_filter(conf)
    auto_purge = conf[CONF_AUTO_PURGE]
    auto_repack = conf[CONF_AUTO_REPACK]
    keep_days = conf[CONF_PURGE_KEEP_DAYS]
    commit_interval = conf[CONF_COMMIT_INTERVAL]
    db_max_retries = conf[CONF_DB_MAX_RETRIES]
    db_retry_wait = conf[CONF_DB_RETRY_WAIT]
    db_url = conf.get(CONF_DB_URL) or DEFAULT_URL.format(
        hass_config_path=hass.config.path(DEFAULT_DB_FILE)
    )
    exclude = conf[CONF_EXCLUDE]
    exclude_t = exclude.get(CONF_EVENT_TYPES, [])
    if EVENT_STATE_CHANGED in exclude_t:
        _LOGGER.warning(
            "State change events are excluded, recorder will not record state "
            "changes. This will become an error in Home Assistant Core 2022.2"
        )
    instance = hass.data[DATA_INSTANCE] = Recorder(
        hass=hass,
        auto_purge=auto_purge,
        auto_repack=auto_repack,
        keep_days=keep_days,
        commit_interval=commit_interval,
        uri=db_url,
        db_max_retries=db_max_retries,
        db_retry_wait=db_retry_wait,
        entity_filter=entity_filter,
        exclude_t=exclude_t,
        exclude_attributes_by_domain=exclude_attributes_by_domain,
    )
    instance.async_initialize()
    instance.async_register()
    instance.start()
    async_register_services(hass, instance)
    history.async_setup(hass)
    statistics.async_setup(hass)
    websocket_api.async_setup(hass)
    await async_process_integration_platforms(hass, DOMAIN, _process_recorder_platform)

    return await instance.async_db_ready

async def async_setup(hass, config):
    """Logbook setup."""
    hass.data[DOMAIN] = {}

    @callback
    def log_message(service):
        """Handle sending notification message service calls."""
        message = service.data[ATTR_MESSAGE]
        name = service.data[ATTR_NAME]
        domain = service.data.get(ATTR_DOMAIN)
        entity_id = service.data.get(ATTR_ENTITY_ID)

        if entity_id is None and domain is None:
            # If there is no entity_id or domain, the event will get
            # filtered away, so we use the "logbook" domain.
            domain = DOMAIN

        message.hass = hass
        message = message.async_render(parse_result=False)

        async_log_entry(hass, name, message, domain, entity_id)

    hass.components.frontend.async_register_built_in_panel(
        "logbook", "logbook", "hass:format-list-bulleted-type"
    )

    conf = config.get(DOMAIN, {})

    if conf:
        filters = sqlalchemy_filter_from_include_exclude_conf(conf)
        entities_filter = convert_include_exclude_filter(conf)
    else:
        filters = None
        entities_filter = None

    hass.http.register_view(LogbookView(conf, filters, entities_filter))

    hass.services.async_register(DOMAIN, "log", log_message, schema=LOG_MESSAGE_SCHEMA)

    await async_process_integration_platforms(hass, DOMAIN, _process_logbook_platform)

    return True

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up LTSS."""
    conf = config[DOMAIN]

    db_url = conf.get(CONF_DB_URL)
    chunk_time_interval = conf.get(CONF_CHUNK_TIME_INTERVAL)
    entity_filter = convert_include_exclude_filter(conf)

    instance = LTSS_DB(
        hass=hass,
        uri=db_url,
        chunk_time_interval=chunk_time_interval,
        entity_filter=entity_filter,
    )
    instance.async_initialize()
    instance.start()

    return await instance.async_db_ready

async def test_included_and_excluded_simple_case_no_globs(hass, recorder_mock):
    """Test filters with included and excluded without globs."""
    filter_accept = {"switch.bla", "sensor.blu", "sensor.keep"}
    filter_reject = {"sensor.bli"}
    conf = {
        CONF_INCLUDE: {
            CONF_DOMAINS: ["sensor", "homeassistant"],
            CONF_ENTITIES: ["switch.bla"],
        },
        CONF_EXCLUDE: {
            CONF_DOMAINS: ["switch"],
            CONF_ENTITIES: ["sensor.bli"],
        },
    }

    extracted_filter = extract_include_exclude_filter_conf(conf)
    entity_filter = convert_include_exclude_filter(extracted_filter)
    sqlalchemy_filter = sqlalchemy_filter_from_include_exclude_conf(extracted_filter)
    assert sqlalchemy_filter is not None

    for entity_id in filter_accept:
        assert entity_filter(entity_id) is True

    for entity_id in filter_reject:
        assert entity_filter(entity_id) is False

    (
        filtered_states_entity_ids,
        filtered_events_entity_ids,
    ) = await _async_get_states_and_events_with_filter(
        hass, sqlalchemy_filter, filter_accept | filter_reject
    )

    assert filtered_states_entity_ids == filter_accept
    assert not filtered_states_entity_ids.intersection(filter_reject)

    assert filtered_events_entity_ids == filter_accept
    assert not filtered_events_entity_ids.intersection(filter_reject)

def setup(hass, config):
    """Set up the Zabbix component."""
    conf = config[DOMAIN]
    protocol = "https" if conf[CONF_SSL] else "http"

    url = urljoin(f"{protocol}://{conf[CONF_HOST]}", conf[CONF_PATH])
    username = conf.get(CONF_USERNAME)
    password = conf.get(CONF_PASSWORD)

    publish_states_host = conf.get(CONF_PUBLISH_STATES_HOST)

    entities_filter = convert_include_exclude_filter(conf)

    try:
        zapi = ZabbixAPI(url=url, user=username, password=password)
        _LOGGER.info("Connected to Zabbix API Version %s", zapi.api_version())
    except ZabbixAPIException as login_exception:
        _LOGGER.error("Unable to login to the Zabbix API: %s", login_exception)
        return False
    except HTTPError as http_error:
        _LOGGER.error("HTTPError when connecting to Zabbix API: %s", http_error)
        zapi = None
        _LOGGER.error(RETRY_MESSAGE, http_error)
        event_helper.call_later(hass, RETRY_INTERVAL, lambda _: setup(hass, config))
        return True

    hass.data[DOMAIN] = zapi

    def event_to_metrics(event, float_keys, string_keys):
        """Add an event to the outgoing Zabbix list."""
        state = event.data.get("new_state")
        if state is None or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE):
            return

        entity_id = state.entity_id
        if not entities_filter(entity_id):
            return

        floats = {}
        strings = {}
        try:
            _state_as_value = float(state.state)
            floats[entity_id] = _state_as_value
        except ValueError:
            try:
                _state_as_value = float(state_helper.state_as_number(state))
                floats[entity_id] = _state_as_value
            except ValueError:
                strings[entity_id] = state.state

        for key, value in state.attributes.items():
            # Try to cast each attribute value as a float; if that is not
            # possible, store the value as a string instead.
            attribute_id = f"{entity_id}/{key}"
            try:
                float_value = float(value)
            except (ValueError, TypeError):
                float_value = None
            if float_value is None or not math.isfinite(float_value):
                strings[attribute_id] = str(value)
            else:
                floats[attribute_id] = float_value

        metrics = []
        float_keys_count = len(float_keys)
        float_keys.update(floats)
        if len(float_keys) != float_keys_count:
            floats_discovery = []
            for float_key in float_keys:
                floats_discovery.append({"{#KEY}": float_key})
            metric = ZabbixMetric(
                publish_states_host,
                "homeassistant.floats_discovery",
                json.dumps(floats_discovery),
            )
            metrics.append(metric)
        for key, value in floats.items():
            metric = ZabbixMetric(
                publish_states_host, f"homeassistant.float[{key}]", value
            )
            metrics.append(metric)

        string_keys.update(strings)
        return metrics

    if publish_states_host:
        zabbix_sender = ZabbixSender(zabbix_server=conf[CONF_HOST])
        instance = ZabbixThread(hass, zabbix_sender, event_to_metrics)
        instance.setup(hass)

    return True

def _generate_event_to_json(conf: dict) -> Callable[[dict], str]:
    """Build event to json converter and add to config."""
    entity_filter = convert_include_exclude_filter(conf)
    tags = conf.get(CONF_TAGS)
    tags_attributes = conf.get(CONF_TAGS_ATTRIBUTES)
    default_measurement = conf.get(CONF_DEFAULT_MEASUREMENT)
    measurement_attr = conf.get(CONF_MEASUREMENT_ATTR)
    override_measurement = conf.get(CONF_OVERRIDE_MEASUREMENT)
    global_ignore_attributes = set(conf[CONF_IGNORE_ATTRIBUTES])
    component_config = EntityValues(
        conf[CONF_COMPONENT_CONFIG],
        conf[CONF_COMPONENT_CONFIG_DOMAIN],
        conf[CONF_COMPONENT_CONFIG_GLOB],
    )

    def event_to_json(event: dict) -> str:
        """Convert an event into JSON in the format InfluxDB expects."""
        state = event.data.get(EVENT_NEW_STATE)
        if (
            state is None
            or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
            or not entity_filter(state.entity_id)
        ):
            return

        try:
            _include_state = _include_value = False

            _state_as_value = float(state.state)
            _include_value = True
        except ValueError:
            try:
                _state_as_value = float(state_helper.state_as_number(state))
                _include_state = _include_value = True
            except ValueError:
                _include_state = True

        include_uom = True
        include_dc = True
        entity_config = component_config.get(state.entity_id)
        measurement = entity_config.get(CONF_OVERRIDE_MEASUREMENT)
        if measurement in (None, ""):
            if override_measurement:
                measurement = override_measurement
            else:
                if measurement_attr == "entity_id":
                    measurement = state.entity_id
                elif measurement_attr == "domain__device_class":
                    device_class = state.attributes.get("device_class")
                    if device_class is None:
                        # This entity has no device_class set; use only the domain.
                        measurement = state.domain
                    else:
                        measurement = f"{state.domain}__{device_class}"
                    include_dc = False
                else:
                    measurement = state.attributes.get(measurement_attr)
                if measurement in (None, ""):
                    if default_measurement:
                        measurement = default_measurement
                    else:
                        measurement = state.entity_id
                else:
                    include_uom = measurement_attr != "unit_of_measurement"

        json = {
            INFLUX_CONF_MEASUREMENT: measurement,
            INFLUX_CONF_TAGS: {
                CONF_DOMAIN: state.domain,
                CONF_ENTITY_ID: state.object_id,
            },
            INFLUX_CONF_TIME: event.time_fired,
            INFLUX_CONF_FIELDS: {},
        }
        if _include_state:
            json[INFLUX_CONF_FIELDS][INFLUX_CONF_STATE] = state.state
        if _include_value:
            json[INFLUX_CONF_FIELDS][INFLUX_CONF_VALUE] = _state_as_value

        ignore_attributes = set(entity_config.get(CONF_IGNORE_ATTRIBUTES, []))
        ignore_attributes.update(global_ignore_attributes)
        for key, value in state.attributes.items():
            if key in tags_attributes:
                json[INFLUX_CONF_TAGS][key] = value
            elif (
                (key != CONF_UNIT_OF_MEASUREMENT or include_uom)
                and (key != "device_class" or include_dc)
                and key not in ignore_attributes
            ):
                # If the key is already in fields, rename it to prevent
                # column data errors in InfluxDB.
                if key in json[INFLUX_CONF_FIELDS]:
                    key = f"{key}_"
                # Try to cast each value as a float; if that fails, store the
                # value as a string under the field key with a "_str" postfix.
                try:
                    json[INFLUX_CONF_FIELDS][key] = float(value)
                except (ValueError, TypeError):
                    new_key = f"{key}_str"
                    new_value = str(value)
                    json[INFLUX_CONF_FIELDS][new_key] = new_value

                    if RE_DIGIT_TAIL.match(new_value):
                        json[INFLUX_CONF_FIELDS][key] = float(
                            RE_DECIMAL.sub("", new_value)
                        )

                # Infinity and NaN are not valid floats in InfluxDB
                try:
                    if not math.isfinite(json[INFLUX_CONF_FIELDS][key]):
                        del json[INFLUX_CONF_FIELDS][key]
                except (KeyError, TypeError):
                    pass

        json[INFLUX_CONF_TAGS].update(tags)

        return json

    return event_to_json

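# Hedged sketch: RE_DIGIT_TAIL and RE_DECIMAL are used above but not defined
# in this snippet. Patterns along the following lines reproduce the intended
# behaviour (recover a numeric field from a string such as "level 42"); the
# exact expressions in the integration may differ.
import re

RE_DECIMAL = re.compile(r"[^\d.]+")  # drop every character that is not a digit or dot
RE_DIGIT_TAIL = re.compile(r"^[^.]*\d+\.?\d*$")  # string that ends in a number

assert RE_DIGIT_TAIL.match("level 42")
assert float(RE_DECIMAL.sub("", "level 42")) == 42.0
assert RE_DIGIT_TAIL.match("not a number") is None
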
async def async_setup(hass, config):
    """Set up the MQTT state feed."""
    conf = config.get(DOMAIN)
    publish_filter = convert_include_exclude_filter(conf)
    has_includes = bool(conf.get(CONF_INCLUDE))
    base_topic = conf.get(CONF_BASE_TOPIC)
    publish_attributes = conf.get(CONF_PUBLISH_ATTRIBUTES)
    publish_timestamps = conf.get(CONF_PUBLISH_TIMESTAMPS)
    publish_discovery = conf.get(CONF_PUBLISH_DISCOVERY)
    if not base_topic.endswith("/"):
        base_topic = f"{base_topic}/"

    hass.data[DOMAIN] = {}
    hass.data[DOMAIN][base_topic] = {}
    hass.data[DOMAIN][base_topic]["conf_published"] = []
    dev_reg = await hass.helpers.device_registry.async_get_registry()
    ent_reg = await hass.helpers.entity_registry.async_get_registry()

    async def message_received(msg):
        """Handle new messages on MQTT."""
        explode_topic = msg.topic.split("/")
        domain = explode_topic[1]
        entity = explode_topic[2]
        element = explode_topic[3]

        _LOGGER.debug(f"Message received: topic {msg.topic}; payload: {msg.payload}")
        if element == "set":
            if msg.payload == STATE_ON:
                await hass.services.async_call(
                    domain, SERVICE_TURN_ON, {ATTR_ENTITY_ID: f"{domain}.{entity}"}
                )
            elif msg.payload == STATE_OFF:
                await hass.services.async_call(
                    domain, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: f"{domain}.{entity}"}
                )
            else:
                _LOGGER.error(
                    f'Invalid service for "set" - payload: {msg.payload} for {entity}'
                )
        if element == "set_light":
            if domain != "light":
                _LOGGER.error(
                    f'Invalid domain for "set_light" - payload: {msg.payload} for {entity}'
                )
            else:
                payload_json = json.loads(msg.payload)
                service_payload = {
                    ATTR_ENTITY_ID: f"{domain}.{entity}",
                }
                if ATTR_TRANSITION in payload_json:
                    service_payload[ATTR_TRANSITION] = payload_json[ATTR_TRANSITION]

                if payload_json["state"] == "ON":
                    if ATTR_BRIGHTNESS in payload_json:
                        service_payload[ATTR_BRIGHTNESS] = payload_json[ATTR_BRIGHTNESS]
                    if ATTR_COLOR_TEMP in payload_json:
                        service_payload[ATTR_COLOR_TEMP] = payload_json[ATTR_COLOR_TEMP]
                    if ATTR_COLOR in payload_json:
                        if ATTR_H in payload_json[ATTR_COLOR]:
                            service_payload[ATTR_HS_COLOR] = [
                                payload_json[ATTR_COLOR][ATTR_H],
                                payload_json[ATTR_COLOR][ATTR_S],
                            ]
                        if ATTR_X in payload_json[ATTR_COLOR]:
                            service_payload[ATTR_XY_COLOR] = [
                                payload_json[ATTR_COLOR][ATTR_X],
                                payload_json[ATTR_COLOR][ATTR_Y],
                            ]
                        if ATTR_R in payload_json[ATTR_COLOR]:
                            service_payload[ATTR_RGB_COLOR] = [
                                payload_json[ATTR_COLOR][ATTR_R],
                                payload_json[ATTR_COLOR][ATTR_G],
                                payload_json[ATTR_COLOR][ATTR_B],
                            ]
                    await hass.services.async_call(
                        domain, SERVICE_TURN_ON, service_payload
                    )
                elif payload_json["state"] == "OFF":
                    await hass.services.async_call(
                        domain, SERVICE_TURN_OFF, service_payload
                    )
                else:
                    _LOGGER.error(
                        f'Invalid state for "set_light" - payload: {msg.payload} for {entity}'
                    )

    async def mqtt_publish(topic, payload, qos=None, retain=None):
        if asyncio.iscoroutinefunction(hass.components.mqtt.async_publish):
            await hass.components.mqtt.async_publish(hass, topic, payload, qos, retain)
        else:
            hass.components.mqtt.publish(topic, payload, qos, retain)

    async def _state_publisher(entity_id, old_state, new_state):
        if new_state is None:
            return

        if not publish_filter(entity_id):
            return

        mybase = f"{base_topic}{entity_id.replace('.', '/')}/"

        if publish_timestamps:
            if new_state.last_updated:
                await mqtt_publish(
                    f"{mybase}last_updated", new_state.last_updated.isoformat(), 1, True
                )
            if new_state.last_changed:
                await mqtt_publish(
                    f"{mybase}last_changed", new_state.last_changed.isoformat(), 1, True
                )

        if publish_attributes:
            for key, val in new_state.attributes.items():
                encoded_val = json.dumps(val, cls=JSONEncoder)
                await mqtt_publish(mybase + key, encoded_val, 1, True)

        ent_parts = entity_id.split(".")
        ent_domain = ent_parts[0]
        ent_id = ent_parts[1]

        if (
            publish_discovery
            and entity_id not in hass.data[DOMAIN][base_topic]["conf_published"]
        ):
            config = {
                "uniq_id": f"mqtt_{entity_id}",
                "name": ent_id.replace("_", " ").title(),
                "stat_t": f"{mybase}state",
                "json_attr_t": f"{mybase}attributes",
                "avty_t": f"{mybase}availability",
            }
            if "device_class" in new_state.attributes:
                config["dev_cla"] = new_state.attributes["device_class"]
            if "unit_of_measurement" in new_state.attributes:
                config["unit_of_meas"] = new_state.attributes["unit_of_measurement"]
            if "state_class" in new_state.attributes:
                config["stat_cla"] = new_state.attributes["state_class"]

            publish_config = False
            if ent_domain == "sensor" and (
                has_includes or "device_class" in new_state.attributes
            ):
                publish_config = True
            elif ent_domain == "binary_sensor" and (
                has_includes or "device_class" in new_state.attributes
            ):
                config["pl_off"] = STATE_OFF
                config["pl_on"] = STATE_ON
                publish_config = True
            elif ent_domain == "switch":
                config["pl_off"] = STATE_OFF
                config["pl_on"] = STATE_ON
                config["cmd_t"] = f"{mybase}set"
                publish_config = True
            elif ent_domain == "device_tracker":
                publish_config = True
            elif ent_domain == "light":
                del config["json_attr_t"]
                config["cmd_t"] = f"{mybase}set_light"
                config["schema"] = "json"

                supported_features = get_supported_features(hass, entity_id)
                if supported_features & SUPPORT_BRIGHTNESS:
                    config["brightness"] = True
                if supported_features & SUPPORT_EFFECT:
                    config["effect"] = True
                if "supported_color_modes" in new_state.attributes:
                    config["color_mode"] = True
                    config["supported_color_modes"] = new_state.attributes[
                        "supported_color_modes"
                    ]
                publish_config = True

            if publish_config:
                for entry in ent_reg.entities.values():
                    if entry.entity_id != entity_id:
                        continue
                    for device in dev_reg.devices.values():
                        if device.id != entry.device_id:
                            continue
                        config["dev"] = {}
                        if device.manufacturer:
                            config["dev"]["mf"] = device.manufacturer
                        if device.model:
                            config["dev"]["mdl"] = device.model
                        if device.name:
                            config["dev"]["name"] = device.name
                        if device.sw_version:
                            config["dev"]["sw"] = device.sw_version
                        if device.identifiers:
                            config["dev"]["ids"] = [id[1] for id in device.identifiers]
                        if device.connections:
                            config["dev"]["cns"] = device.connections
                encoded = json.dumps(config, cls=JSONEncoder)
                await mqtt_publish(f"{mybase}config", encoded, 1, True)
                hass.data[DOMAIN][base_topic]["conf_published"].append(entity_id)

        if publish_discovery:
            if ent_domain == "light":
                payload = {
                    "state": "ON" if new_state.state == STATE_ON else "OFF",
                }
                if "brightness" in new_state.attributes:
                    payload["brightness"] = new_state.attributes["brightness"]
                if "color_mode" in new_state.attributes:
                    payload["color_mode"] = new_state.attributes["color_mode"]
                if "color_temp" in new_state.attributes:
                    payload["color_temp"] = new_state.attributes["color_temp"]
                if "effect" in new_state.attributes:
                    payload["effect"] = new_state.attributes["effect"]

                color = {}
                if "hs_color" in new_state.attributes:
                    color["h"] = new_state.attributes["hs_color"][0]
                    color["s"] = new_state.attributes["hs_color"][1]
                if "xy_color" in new_state.attributes:
                    color["x"] = new_state.attributes["xy_color"][0]
                    color["y"] = new_state.attributes["xy_color"][1]
                if "rgb_color" in new_state.attributes:
                    color["r"] = new_state.attributes["rgb_color"][0]
                    color["g"] = new_state.attributes["rgb_color"][1]
                    color["b"] = new_state.attributes["rgb_color"][2]
                if color:
                    payload["color"] = color

                await mqtt_publish(
                    f"{mybase}state", json.dumps(payload, cls=JSONEncoder), 1, True
                )
                payload = (
                    "offline"
                    if new_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN, None)
                    else "online"
                )
                await mqtt_publish(f"{mybase}availability", payload, 1, True)
            else:
                payload = new_state.state
                await mqtt_publish(f"{mybase}state", payload, 1, True)
                payload = (
                    "offline"
                    if new_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN, None)
                    else "online"
                )
                await mqtt_publish(f"{mybase}availability", payload, 1, True)
                attributes = {}
                for key, val in new_state.attributes.items():
                    attributes[key] = val
                encoded = json.dumps(attributes, cls=JSONEncoder)
                await mqtt_publish(f"{mybase}attributes", encoded, 1, True)
        else:
            payload = new_state.state
            await mqtt_publish(f"{mybase}state", payload, 1, True)

    if publish_discovery:
        await hass.components.mqtt.async_subscribe(
            f"{base_topic}switch/+/set", message_received
        )
        await hass.components.mqtt.async_subscribe(
            f"{base_topic}light/+/set_light", message_received
        )

    async_track_state_change(hass, MATCH_ALL, _state_publisher)
    return True

async def test_specificly_included_entity_always_wins_over_glob(hass, recorder_mock):
    """Test that a specifically included entity always wins over a glob."""
    filter_accept = {
        "sensor.apc900va_status",
        "sensor.apc900va_battery_charge",
        "sensor.apc900va_battery_runtime",
        "sensor.apc900va_load",
        "sensor.energy_x",
    }
    filter_reject = {
        "sensor.apc900va_not_included",
    }
    conf = {
        CONF_EXCLUDE: {
            CONF_DOMAINS: [
                "updater",
                "camera",
                "group",
                "media_player",
                "script",
                "sun",
                "automation",
                "zone",
                "weblink",
                "scene",
                "calendar",
                "weather",
                "remote",
                "notify",
                "switch",
                "shell_command",
                "media_player",
            ],
            CONF_ENTITY_GLOBS: ["sensor.apc900va_*"],
        },
        CONF_INCLUDE: {
            CONF_DOMAINS: [
                "binary_sensor",
                "climate",
                "device_tracker",
                "input_boolean",
                "sensor",
            ],
            CONF_ENTITY_GLOBS: ["sensor.energy_*"],
            CONF_ENTITIES: [
                "sensor.apc900va_status",
                "sensor.apc900va_battery_charge",
                "sensor.apc900va_battery_runtime",
                "sensor.apc900va_load",
            ],
        },
    }

    extracted_filter = extract_include_exclude_filter_conf(conf)
    entity_filter = convert_include_exclude_filter(extracted_filter)
    sqlalchemy_filter = sqlalchemy_filter_from_include_exclude_conf(extracted_filter)
    assert sqlalchemy_filter is not None

    for entity_id in filter_accept:
        assert entity_filter(entity_id) is True

    for entity_id in filter_reject:
        assert entity_filter(entity_id) is False

    (
        filtered_states_entity_ids,
        filtered_events_entity_ids,
    ) = await _async_get_states_and_events_with_filter(
        hass, sqlalchemy_filter, filter_accept | filter_reject
    )

    assert filtered_states_entity_ids == filter_accept
    assert not filtered_states_entity_ids.intersection(filter_reject)

    assert filtered_events_entity_ids == filter_accept
    assert not filtered_events_entity_ids.intersection(filter_reject)

def _get_events(hass, config, start_day, end_day, entity_id=None):
    """Get events for a period of time."""
    entity_attr_cache = EntityAttributeCache(hass)

    def yield_events(query):
        """Yield Events that are not filtered away."""
        for row in query.yield_per(1000):
            event = LazyEventPartialState(row)
            if _keep_event(hass, event, entities_filter, entity_attr_cache):
                yield event

    with session_scope(hass=hass) as session:
        if entity_id is not None:
            entity_ids = [entity_id.lower()]
            entities_filter = generate_filter([], entity_ids, [], [])
        elif config.get(CONF_EXCLUDE) or config.get(CONF_INCLUDE):
            entities_filter = convert_include_exclude_filter(config)
            entity_ids = _get_related_entity_ids(session, entities_filter)
        else:
            entities_filter = _all_entities_filter
            entity_ids = None

        old_state = aliased(States, name="old_state")

        query = (
            session.query(
                Events.event_type,
                Events.event_data,
                Events.time_fired,
                Events.context_user_id,
                States.state_id,
                States.state,
                States.entity_id,
                States.domain,
                States.attributes,
                old_state.state_id.label("old_state_id"),
            )
            .order_by(Events.time_fired)
            .outerjoin(States, (Events.event_id == States.event_id))
            .outerjoin(old_state, (States.old_state_id == old_state.state_id))
            # The filter below removes state change events that do not have
            # an old_state or new_state, or where the old and new state are
            # the same, for the v8 schema or later.
            #
            # If the events/states were stored before the v8 schema, we rely
            # on the prev_states dict to remove them.
            #
            # When all data is schema v8 or later, the check for
            # EMPTY_JSON_OBJECT can be removed.
            .filter(
                (Events.event_type != EVENT_STATE_CHANGED)
                | (Events.event_data != EMPTY_JSON_OBJECT)
                | (
                    (States.state_id.isnot(None))
                    & (old_state.state_id.isnot(None))
                    & (States.state != old_state.state)
                )
            )
            .filter(
                Events.event_type.in_(ALL_EVENT_TYPES + list(hass.data.get(DOMAIN, {})))
            )
            .filter((Events.time_fired > start_day) & (Events.time_fired < end_day))
        )

        if entity_ids:
            query = query.filter(
                (
                    (States.last_updated == States.last_changed)
                    & States.entity_id.in_(entity_ids)
                )
                | (States.state_id.is_(None))
            )
        else:
            query = query.filter(
                (States.last_updated == States.last_changed)
                | (States.state_id.is_(None))
            )

        # When all data is schema v8 or later, prev_states can be removed
        prev_states = {}
        return list(humanify(hass, yield_events(query), entity_attr_cache, prev_states))

async def filtering_entity_id(hass):
    """Run 100k state changes through the entity filter."""
    config = {
        "include": {
            "domains": [
                "automation",
                "script",
                "group",
                "media_player",
                "custom_component",
            ],
            "entity_globs": [
                "binary_sensor.*_contact",
                "binary_sensor.*_occupancy",
                "binary_sensor.*_detected",
                "binary_sensor.*_active",
                "input_*",
                "device_tracker.*_phone",
                "switch.*_light",
                "binary_sensor.*_charging",
                "binary_sensor.*_lock",
                "binary_sensor.*_connected",
            ],
            "entities": [
                "test.entity_1",
                "test.entity_2",
                "binary_sensor.garage_door_open",
                "test.entity_3",
                "test.entity_4",
            ],
        },
        "exclude": {
            "domains": ["input_number"],
            "entity_globs": ["media_player.google_*", "group.all_*"],
            "entities": [],
        },
    }
    entity_ids = [
        "automation.home_arrival",
        "script.shut_off_house",
        "binary_sensor.garage_door_open",
        "binary_sensor.front_door_lock",
        "binary_sensor.kitchen_motion_sensor_occupancy",
        "switch.desk_lamp",
        "light.dining_room",
        "input_boolean.guest_staying_over",
        "person.eleanor_fant",
        "alert.issue_at_home",
        "calendar.eleanor_fant_s_calendar",
        "sun.sun",
    ]

    entities_filter = convert_include_exclude_filter(config)
    size = len(entity_ids)

    start = timer()
    for i in range(10**5):
        entities_filter(entity_ids[i % size])

    return timer() - start

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the recorder."""
    conf = config[DOMAIN]
    entity_filter = convert_include_exclude_filter(conf)
    auto_purge = conf[CONF_AUTO_PURGE]
    keep_days = conf[CONF_PURGE_KEEP_DAYS]
    # commit_interval = conf[CONF_COMMIT_INTERVAL]
    # db_max_retries = conf[CONF_DB_MAX_RETRIES]
    # db_retry_wait = conf[CONF_DB_RETRY_WAIT]
    # db_url = conf.get(CONF_DB_URL, None)
    # if not db_url:
    #     db_url = DEFAULT_URL.format(hass_config_path=hass.config.path(DEFAULT_DB_FILE))

    # AIS dom fix - get recorder config from file
    commit_interval = 60
    db_max_retries = 10
    db_retry_wait = 3
    db_integrity_check = conf[CONF_DB_INTEGRITY_CHECK]
    try:
        import json

        from homeassistant.components import ais_files
        import homeassistant.components.ais_dom.ais_global as ais_global

        with open(
            hass.config.config_dir + ais_files.G_DB_SETTINGS_INFO_FILE
        ) as json_file:
            db_settings = json.load(json_file)
        ais_global.G_DB_SETTINGS_INFO = db_settings
        db_url = db_settings["dbUrl"]
        if db_url == "sqlite:///:memory:":
            keep_days = 2
        else:
            if db_url.startswith("sqlite://///"):
                # DB in file
                from homeassistant.components import ais_usb

                if ais_usb.is_usb_url_valid_external_drive(db_url) is not True:
                    _LOGGER.error(
                        "Invalid external drive %s selected for recording", db_url
                    )
                    # enable recorder in memory
                    db_url = "sqlite:///:memory:"
                    keep_days = 1
                else:
                    keep_days = 10
            if "dbKeepDays" in db_settings:
                keep_days = int(db_settings["dbKeepDays"])
    except Exception:
        # enable recorder in memory
        db_url = "sqlite:///:memory:"
        keep_days = 1

    exclude = conf[CONF_EXCLUDE]
    exclude_t = exclude.get(CONF_EVENT_TYPES, [])
    instance = hass.data[DATA_INSTANCE] = Recorder(
        hass=hass,
        auto_purge=auto_purge,
        keep_days=keep_days,
        commit_interval=commit_interval,
        uri=db_url,
        db_max_retries=db_max_retries,
        db_retry_wait=db_retry_wait,
        entity_filter=entity_filter,
        exclude_t=exclude_t,
        db_integrity_check=db_integrity_check,
    )
    instance.async_initialize()
    instance.start()

    async def async_handle_purge_service(service):
        """Handle calls to the purge service."""
        instance.do_adhoc_purge(**service.data)

    hass.services.async_register(
        DOMAIN, SERVICE_PURGE, async_handle_purge_service, schema=SERVICE_PURGE_SCHEMA
    )

    return await instance.async_db_ready