def test_as_number_coercion(self):
    """Test state_as_number with number."""
    # Each group of equivalent representations must coerce to one float.
    cases = (
        (0.0, ('0', '0.0', 0, 0.0)),
        (1.0, ('1', '1.0', 1, 1.0)),
    )
    for expected, raw_values in cases:
        for raw in raw_values:
            result = state.state_as_number(ha.State('domain.test', raw, {}))
            assert result == expected
def test_as_number_coercion(self):
    """Test state_as_number with number."""
    # Numeric strings, ints and floats must all coerce to the same float.
    for _state in ('0', '0.0', 0, 0.0):
        self.assertEqual(
            0.0, state.state_as_number(
                ha.State('domain.test', _state, {})))
    for _state in ('1', '1.0', 1, 1.0):
        self.assertEqual(
            1.0, state.state_as_number(
                ha.State('domain.test', _state, {})))
def test_as_number_states(self):
    """Test state_as_number with states."""
    # Symbolic "off-like" states map to 0, "on-like" states map to 1.
    zero_states = (STATE_OFF, STATE_CLOSED, STATE_UNLOCKED,
                   STATE_BELOW_HORIZON)
    one_states = (STATE_ON, STATE_OPEN, STATE_LOCKED,
                  STATE_ABOVE_HORIZON)
    for _state in zero_states:
        self.assertEqual(0, state.state_as_number(
            ha.State('domain.test', _state, {})))
    for _state in one_states:
        self.assertEqual(1, state.state_as_number(
            ha.State('domain.test', _state, {})))
def test_as_number_states(self):
    """Test state_as_number with states."""
    # Map each expected number to the symbolic states producing it.
    expectations = {
        0: (STATE_OFF, STATE_CLOSED, STATE_UNLOCKED,
            STATE_BELOW_HORIZON, STATE_NOT_HOME),
        1: (STATE_ON, STATE_OPEN, STATE_LOCKED,
            STATE_ABOVE_HORIZON, STATE_HOME),
    }
    for expected, symbolic_states in expectations.items():
        for _state in symbolic_states:
            assert state.state_as_number(
                ha.State('domain.test', _state, {})) == expected
def logentries_event_listener(event):
    """Listen for new messages on the bus and sends them to Logentries."""
    state = event.data.get('new_state')
    # Nothing to report when the entity was removed.
    if state is None:
        return
    try:
        # Prefer a numeric representation; fall back to the raw state
        # string for states that cannot be coerced to a number.
        _state = state_helper.state_as_number(state)
    except ValueError:
        _state = state.state
    json_body = [
        {
            'domain': state.domain,
            'entity_id': state.object_id,
            'attributes': dict(state.attributes),
            'time': str(event.time_fired),
            'value': _state,
        }
    ]
    try:
        payload = {
            "host": le_wh,
            "event": json_body
        }
        # Best-effort delivery: network failures are logged, not raised.
        requests.post(le_wh, data=json.dumps(payload), timeout=10)
    except requests.exceptions.RequestException as error:
        _LOGGER.exception("Error sending to Logentries: %s", error)
def influx_event_listener(event):
    """Listen for new messages on the bus and sends them to Influx."""
    state = event.data.get('new_state')
    # Skip events without a usable new state.
    if state is None or state.state in (STATE_UNKNOWN, ''):
        return
    try:
        _state = state_helper.state_as_number(state)
    except ValueError:
        # Non-numeric states are stored verbatim.
        _state = state.state
    # Use the unit of measurement as the measurement name; fall back
    # to the entity id when no unit is set.
    measurement = state.attributes.get('unit_of_measurement')
    if measurement in (None, ''):
        measurement = state.entity_id
    json_body = [
        {
            'measurement': measurement,
            'tags': {
                'domain': state.domain,
                'entity_id': state.object_id,
            },
            'time': event.time_fired,
            'fields': {
                'value': _state,
            }
        }
    ]
    try:
        influx.write_points(json_body)
    except exceptions.InfluxDBClientError:
        _LOGGER.exception('Error saving event "%s" to InfluxDB', json_body)
def _handle_climate(self, state):
    """Export climate temperatures and overall state as Prometheus gauges."""
    temp = state.attributes.get(ATTR_TEMPERATURE)
    # Explicit None check: a target temperature of 0 (degrees) is a valid
    # value that the previous truthiness test silently dropped.
    if temp is not None:
        if self._climate_units == TEMP_FAHRENHEIT:
            temp = fahrenheit_to_celsius(temp)
        metric = self._metric(
            'temperature_c', self.prometheus_client.Gauge,
            'Temperature in degrees Celsius')
        metric.labels(**self._labels(state)).set(temp)

    current_temp = state.attributes.get(ATTR_CURRENT_TEMPERATURE)
    # Same fix for the currently-measured temperature.
    if current_temp is not None:
        if self._climate_units == TEMP_FAHRENHEIT:
            current_temp = fahrenheit_to_celsius(current_temp)
        metric = self._metric(
            'current_temperature_c', self.prometheus_client.Gauge,
            'Current Temperature in degrees Celsius')
        metric.labels(**self._labels(state)).set(current_temp)

    metric = self._metric(
        'climate_state', self.prometheus_client.Gauge,
        'State of the thermostat (0/1)')
    try:
        value = state_helper.state_as_number(state)
        metric.labels(**self._labels(state)).set(value)
    except ValueError:
        # States with no numeric mapping are simply not exported.
        pass
def splunk_event_listener(event):
    """Listen for new messages on the bus and sends them to Splunk."""
    state = event.data.get("new_state")
    if state is None:
        return
    try:
        # Prefer a numeric value; fall back to the raw state string.
        _state = state_helper.state_as_number(state)
    except ValueError:
        _state = state.state
    json_body = [
        {
            "domain": state.domain,
            "entity_id": state.object_id,
            "attributes": dict(state.attributes),
            "time": str(event.time_fired),
            "value": _state,
        }
    ]
    try:
        payload = {"host": event_collector, "event": json_body}
        # Best-effort delivery: network failures are logged, not raised.
        requests.post(event_collector, data=json.dumps(payload),
                      headers=headers)
    except requests.exceptions.RequestException as error:
        _LOGGER.exception("Error saving event to Splunk: %s", error)
def update_emoncms(time):
    """Send whitelisted entities states regularly to Emoncms."""
    payload_dict = {}
    for entity_id in whitelist:
        state = hass.states.get(entity_id)
        # Skip missing, unknown or unavailable entities.
        if state is None or state.state in (
                STATE_UNKNOWN, '', STATE_UNAVAILABLE):
            continue
        try:
            payload_dict[entity_id] = state_helper.state_as_number(
                state)
        except ValueError:
            # Non-numeric states cannot be sent to Emoncms.
            continue
    if len(payload_dict) > 0:
        # Build an Emoncms-style "{key:value,...}" payload string.
        payload = "{%s}" % ",".join("{}:{}".format(key, val)
                                    for key, val in payload_dict.items())
        send_data(conf.get(CONF_URL), conf.get(CONF_API_KEY),
                  str(conf.get(CONF_INPUTNODE)), payload)
    # Reschedule this callback one scan interval from now.
    track_point_in_time(
        hass, update_emoncms,
        time + timedelta(seconds=conf.get(CONF_SCAN_INTERVAL)))
def state_changed_listener(event):
    """Listen for new messages on the bus and sends them to Datadog."""
    state = event.data.get('new_state')
    if state is None or state.state == STATE_UNKNOWN:
        return
    # Hidden entities are intentionally not reported.
    if state.attributes.get('hidden') is True:
        return
    states = dict(state.attributes)
    metric = "{}.{}".format(prefix, state.domain)
    tags = ["entity:{}".format(state.entity_id)]
    for key, value in states.items():
        # Only numeric attributes can be reported as gauges.
        if isinstance(value, (float, int)):
            attribute = "{}.{}".format(metric, key.replace(' ', '_'))
            statsd.gauge(
                attribute, value, sample_rate=sample_rate, tags=tags)
            _LOGGER.debug(
                "Sent metric %s: %s (tags: %s)", attribute, value, tags)
    try:
        value = state_helper.state_as_number(state)
    except ValueError:
        # Non-numeric main state: attributes were still reported above.
        _LOGGER.debug(
            "Error sending %s: %s (tags: %s)", metric, state.state, tags)
        return
    statsd.gauge(metric, value, sample_rate=sample_rate, tags=tags)
    _LOGGER.debug('Sent metric %s: %s (tags: %s)', metric, value, tags)
def splunk_event_listener(event):
    """Listen for new messages on the bus and sends them to Splunk."""
    state = event.data.get('new_state')
    if state is None:
        return
    try:
        # Prefer a numeric value; fall back to the raw state string.
        _state = state_helper.state_as_number(state)
    except ValueError:
        _state = state.state
    json_body = [
        {
            'domain': state.domain,
            'entity_id': state.object_id,
            'attributes': dict(state.attributes),
            'time': str(event.time_fired),
            'value': _state,
            'host': name,
        }
    ]
    try:
        payload = {
            "host": event_collector,
            "event": json_body,
        }
        # JSONEncoder handles non-serializable attribute values
        # (e.g. datetimes) in the payload.
        requests.post(event_collector,
                      data=json.dumps(payload, cls=JSONEncoder),
                      headers=headers, timeout=10)
    except requests.exceptions.RequestException as error:
        _LOGGER.exception("Error saving event to Splunk: %s", error)
def _handle_sensor(self, state):
    """Export a sensor state as a Prometheus gauge named after the entity."""
    unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
    # Derive a metric name from the entity id.
    metric = state.entity_id.split(".")[1]
    if '_' not in str(metric):
        metric = state.entity_id.replace('.', '_')
    try:
        # Strip a trailing numeric suffix (e.g. "foo_2" -> "foo") so
        # similar sensors share one metric name.
        int(metric.split("_")[-1])
        metric = "_".join(metric.split("_")[:-1])
    except ValueError:
        # No numeric suffix; keep the name as-is.
        pass
    _metric = self._metric(metric,
                           self.prometheus_client.Gauge,
                           state.entity_id)
    try:
        value = state_helper.state_as_number(state)
        if unit == TEMP_FAHRENHEIT:
            value = fahrenheit_to_celsius(value)
        _metric.labels(**self._labels(state)).set(value)
    except ValueError:
        # States with no numeric mapping are not exported.
        pass
    # Battery level is reported separately regardless of the main state.
    self._battery(state)
def statsd_event_listener(event):
    """Listen for new messages on the bus and sends them to StatsD."""
    state = event.data.get("new_state")
    if state is None:
        return
    try:
        _state = state_helper.state_as_number(state)
    except ValueError:
        # Non-numeric states cannot be gauged.
        return
    states = dict(state.attributes)
    _LOGGER.debug("Sending %s.%s", state.entity_id, _state)
    if show_attribute_flag is True:
        # State goes under an explicit ".state" suffix so attribute
        # gauges can live alongside it.
        statsd_client.gauge(
            "%s.state" % state.entity_id, _state, sample_rate)
        # Send attribute values
        for key, value in states.items():
            if isinstance(value, (float, int)):
                stat = "%s.%s" % (state.entity_id, key.replace(" ", "_"))
                statsd_client.gauge(stat, value, sample_rate)
    else:
        statsd_client.gauge(state.entity_id, _state, sample_rate)
    # Increment the count
    statsd_client.incr(state.entity_id, rate=sample_rate)
def _handle_lock(self, state):
    """Export the lock state (0/1) as a Prometheus gauge."""
    metric = self._metric(
        'lock_state',
        self.prometheus_client.Gauge,
        'State of the lock (0/1)',
    )
    try:
        value = state_helper.state_as_number(state)
        metric.labels(**self._labels(state)).set(value)
    except ValueError:
        # Ignore states with no numeric mapping instead of letting the
        # exception escape the listener (consistent with _handle_switch).
        pass
def _handle_device_tracker(self, state):
    """Export the device tracker state (0/1) as a Prometheus gauge."""
    metric = self._metric(
        'device_tracker_state',
        self.prometheus_client.Gauge,
        'State of the device tracker (0/1)',
    )
    try:
        value = state_helper.state_as_number(state)
        metric.labels(**self._labels(state)).set(value)
    except ValueError:
        # Ignore states with no numeric mapping instead of letting the
        # exception escape the listener (consistent with _handle_switch).
        pass
def _handle_binary_sensor(self, state):
    """Export the binary sensor state (0/1) as a Prometheus gauge."""
    metric = self._metric(
        'binary_sensor_state',
        self.prometheus_client.Gauge,
        'State of the binary sensor (0/1)',
    )
    try:
        value = state_helper.state_as_number(state)
        metric.labels(**self._labels(state)).set(value)
    except ValueError:
        # Ignore states with no numeric mapping instead of letting the
        # exception escape the listener (consistent with _handle_switch).
        pass
def state_as_number(state):
    """Return a state casted to a float."""
    try:
        return state_helper.state_as_number(state)
    except ValueError:
        # Unconvertible states are reported as 0 after a warning.
        _LOGGER.warning("Could not convert %s to float", state)
        return 0
def _handle_input_boolean(self, state):
    """Export the input boolean state (0/1) as a Prometheus gauge."""
    metric = self._metric(
        'input_boolean_state',
        self.prometheus_client.Gauge,
        'State of the input boolean (0/1)',
    )
    try:
        value = state_helper.state_as_number(state)
        metric.labels(**self._labels(state)).set(value)
    except ValueError:
        # Ignore states with no numeric mapping instead of letting the
        # exception escape the listener (consistent with _handle_switch).
        pass
def _handle_sensor(self, state):
    """Export a sensor reading as a unit-specific Prometheus gauge."""
    # Map unit-of-measurement -> (metric name, metric class, help text).
    _sensor_types = {
        TEMP_CELSIUS: (
            'temperature_c', self.prometheus_client.Gauge,
            'Temperature in degrees Celsius',
        ),
        TEMP_FAHRENHEIT: (
            'temperature_c', self.prometheus_client.Gauge,
            'Temperature in degrees Celsius',
        ),
        '%': (
            'relative_humidity', self.prometheus_client.Gauge,
            'Relative humidity (0..100)',
        ),
        'lux': (
            'light_lux', self.prometheus_client.Gauge,
            'Light level in lux',
        ),
        'kWh': (
            'electricity_used_kwh', self.prometheus_client.Gauge,
            'Electricity used by this device in KWh',
        ),
        'V': (
            'voltage', self.prometheus_client.Gauge,
            'Currently reported voltage in Volts',
        ),
        'W': (
            'electricity_usage_w', self.prometheus_client.Gauge,
            'Currently reported electricity draw in Watts',
        ),
        'min': (
            'sensor_min', self.prometheus_client.Gauge,
            'Time in minutes reported by a sensor'
        ),
        'Events': (
            'sensor_event_count', self.prometheus_client.Gauge,
            'Number of events for a sensor'
        ),
    }
    unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
    metric = _sensor_types.get(unit)
    # Sensors with an unrecognized unit are not exported as a gauge.
    if metric is not None:
        metric = self._metric(*metric)
        try:
            value = state_helper.state_as_number(state)
            # Normalize Fahrenheit readings into the Celsius metric.
            if unit == TEMP_FAHRENHEIT:
                value = fahrenheit_to_celsius(value)
            metric.labels(**self._labels(state)).set(value)
        except ValueError:
            pass
    # Battery level is reported separately regardless of the main state.
    self._battery(state)
def _handle_switch(self, state):
    """Record the on/off state of a switch as a 0/1 gauge."""
    gauge = self._metric(
        'switch_state',
        self.prometheus_client.Gauge,
        'State of the switch (0/1)',
    )
    try:
        numeric = state_helper.state_as_number(state)
        gauge.labels(**self._labels(state)).set(numeric)
    except ValueError:
        # Unmappable states are simply not exported.
        pass
def influx_event_listener(event):
    """Listen for new messages on the bus and sends them to Influx."""
    state = event.data.get('new_state')
    # Drop removed, unknown/unavailable or blacklisted entities.
    if state is None or state.state in (
            STATE_UNKNOWN, '', STATE_UNAVAILABLE) or \
            state.entity_id in blacklist:
        return
    try:
        # An explicit whitelist, when configured, limits which entities
        # are recorded at all.
        if len(whitelist) > 0 and state.entity_id not in whitelist:
            return
        _state = state_helper.state_as_number(state)
    except ValueError:
        # Non-numeric states are stored verbatim.
        _state = state.state
    measurement = state.attributes.get('unit_of_measurement')
    if measurement in (None, ''):
        if default_measurement:
            measurement = default_measurement
        else:
            measurement = state.entity_id
    json_body = [
        {
            'measurement': measurement,
            'tags': {
                'domain': state.domain,
                'entity_id': state.object_id,
            },
            'time': event.time_fired,
            'fields': {
                'value': _state,
            }
        }
    ]
    for key, value in state.attributes.items():
        if key != 'unit_of_measurement':
            if isinstance(value, (str, float, bool)) or \
                    key.endswith('_id'):
                json_body[0]['fields'][key] = value
            elif isinstance(value, int):
                # Prevent column data errors in influxDB.
                json_body[0]['fields'][key] = float(value)
    json_body[0]['tags'].update(tags)
    try:
        influx.write_points(json_body)
    except exceptions.InfluxDBClientError:
        _LOGGER.exception('Error saving event "%s" to InfluxDB', json_body)
def event_to_json(event):
    """Add an event to the outgoing list."""
    state = event.data.get('new_state')
    # Drop removed, unknown/unavailable or blacklisted entities/domains.
    if state is None or state.state in (
            STATE_UNKNOWN, '', STATE_UNAVAILABLE) or \
            state.entity_id in blacklist_e or state.domain in blacklist_d:
        return
    try:
        if (whitelist_e and state.entity_id not in whitelist_e) or \
                (whitelist_d and state.domain not in whitelist_d):
            return
        _include_state = _include_value = False
        # Numeric-looking states produce a float "value" field; otherwise
        # try the symbolic-to-number mapping, and as a last resort keep
        # only the raw "state" field.
        _state_as_value = float(state.state)
        _include_value = True
    except ValueError:
        try:
            _state_as_value = float(state_helper.state_as_number(state))
            _include_state = _include_value = True
        except ValueError:
            _include_state = True
    out_event = {
        'tags': {
            'domain': state.domain,
            'entity_id': state.object_id,
        },
        'time': event.time_fired.isoformat(),
        'fields': {}
    }
    if _include_state:
        out_event['fields']['state'] = state.state
    if _include_value:
        out_event['fields']['value'] = _state_as_value
    for key, value in state.attributes.items():
        if key != 'unit_of_measurement':
            # If the key is already in fields
            if key in out_event['fields']:
                key = key + "_"
            # For each value we try to cast it as float
            # But if we can not do it we store the value
            # as string
            try:
                out_event['fields'][key] = float(value)
            except (ValueError, TypeError):
                out_event['fields'][key] = str(value)
    return out_event
def test_event_listener(self, mock_requests):
    """Test event listener."""
    self._setup(mock_requests)

    now = dt_util.now()
    # NOTE(review): the dict values here are effectively unused — `out`
    # is recomputed from state_as_number inside the loop below.
    valid = {
        '1': 1,
        '1.0': 1.0,
        STATE_ON: 1,
        STATE_OFF: 0,
        'foo': 'foo',
    }
    for in_, out in valid.items():
        state = mock.MagicMock(state=in_, domain='fake',
                               object_id='entity',
                               attributes={'datetime_attr': now})
        event = mock.MagicMock(data={'new_state': state},
                               time_fired=12345)

        # Mirror the component's own numeric coercion to build the
        # expected payload value.
        try:
            out = state_helper.state_as_number(state)
        except ValueError:
            out = state.state

        body = [{
            'domain': 'fake',
            'entity_id': 'entity',
            'attributes': {
                'datetime_attr': now.isoformat()
            },
            'time': '12345',
            'value': out,
            'host': 'HASS',
        }]
        payload = {'host': 'http://host:8088/services/collector/event',
                   'event': body}
        self.handler_method(event)
        # Exactly one POST per event, with the full expected payload.
        self.assertEqual(self.mock_post.call_count, 1)
        self.assertEqual(
            self.mock_post.call_args,
            mock.call(
                payload['host'], data=json.dumps(payload),
                headers={'Authorization': 'Splunk secret'},
                timeout=10
            )
        )
        self.mock_post.reset_mock()
def dweet_event_listener(event):
    """Listen for new messages on the bus and sends them to Dweet.io."""
    state = event.data.get('new_state')
    # Only whitelisted entities with a known state are forwarded.
    if state is None or state.state in (STATE_UNKNOWN, '') \
            or state.entity_id not in whitelist:
        return
    try:
        _state = state_helper.state_as_number(state)
    except ValueError:
        # Non-numeric states are sent verbatim.
        _state = state.state
    # NOTE(review): json_body is a shared, module-level mapping mutated
    # across events (keyed by friendly_name) — confirm this accumulation
    # is intended.
    json_body[state.attributes.get('friendly_name')] = _state
    send_data(name, json_body)
def thingspeak_listener(entity_id, old_state, new_state):
    """Listen for new events and send them to Thingspeak."""
    # Skip removed or unknown/unavailable states.
    if new_state is None or new_state.state in (
            STATE_UNKNOWN, '', STATE_UNAVAILABLE):
        return
    try:
        # Only the single configured entity is tracked.
        if new_state.entity_id != entity:
            return
        _state = state_helper.state_as_number(new_state)
    except ValueError:
        # Non-numeric states cannot be charted by Thingspeak.
        return
    try:
        channel.update({'field1': _state})
    except RequestException:
        _LOGGER.error(
            "Error while sending value '%s' to Thingspeak", _state)
def _handle_light(self, state):
    """Export a light's load level as a Prometheus gauge."""
    # NOTE(review): the emitted values are scaled by 100 below, so the
    # actual range is 0..100 despite the help text saying (0..1) —
    # confirm which is intended.
    metric = self._metric(
        'light_state',
        self.prometheus_client.Gauge,
        'Load level of a light (0..1)',
    )
    try:
        if 'brightness' in state.attributes:
            # Brightness attribute is 0..255; normalize to 0..1 first.
            value = state.attributes['brightness'] / 255.0
        else:
            value = state_helper.state_as_number(state)
        value = value * 100
        metric.labels(**self._labels(state)).set(value)
    except ValueError:
        # Unmappable states are not exported.
        pass
def statsd_event_listener(event):
    """Listen for new messages on the bus and sends them to StatsD."""
    new_state = event.data.get('new_state')
    if new_state is None:
        return
    try:
        value = state_helper.state_as_number(new_state)
    except ValueError:
        # State has no numeric representation; nothing to report.
        return
    if isinstance(value, NUM_TYPES):
        _LOGGER.debug('Sending %s.%s', new_state.entity_id, value)
        meter.send(new_state.entity_id, value)
def _handle_sensor(self, state):
    """Export a sensor reading as a unit-specific Prometheus gauge."""
    # Map unit-of-measurement -> (metric name, metric class, help text).
    _sensor_types = {
        TEMP_CELSIUS: (
            'temperature_c', self.prometheus_client.Gauge,
            'Temperature in degrees Celsius',
        ),
        TEMP_FAHRENHEIT: (
            'temperature_c', self.prometheus_client.Gauge,
            'Temperature in degrees Celsius',
        ),
        '%': (
            'relative_humidity', self.prometheus_client.Gauge,
            'Relative humidity (0..100)',
        ),
        'lux': (
            'light_lux', self.prometheus_client.Gauge,
            'Light level in lux',
        ),
        'kWh': (
            'electricity_used_kwh', self.prometheus_client.Gauge,
            'Electricity used by this device in KWh',
        ),
        'V': (
            'voltage', self.prometheus_client.Gauge,
            'Currently reported voltage in Volts',
        ),
        'W': (
            'electricity_usage_w', self.prometheus_client.Gauge,
            'Currently reported electricity draw in Watts',
        ),
    }
    unit = state.attributes.get('unit_of_measurement')
    metric = _sensor_types.get(unit)
    # Sensors with an unrecognized unit are not exported as a gauge.
    if metric is not None:
        metric = self._metric(*metric)
        try:
            value = state_helper.state_as_number(state)
            metric.labels(**self._labels(state)).set(value)
        except ValueError:
            pass
    # Battery level is reported separately regardless of the main state.
    self._battery(state)
def _report_attributes(self, entity_id, new_state):
    """Send an entity's numeric attributes (and state) to Graphite."""
    now = time.time()
    things = dict(new_state.attributes)
    try:
        # Include the main state alongside the attributes when it maps
        # to a number.
        things['state'] = state.state_as_number(new_state)
    except ValueError:
        pass
    # Graphite plaintext protocol: "<path> <value> <timestamp>".
    lines = ['%s.%s.%s %f %i' % (self._prefix,
                                 entity_id,
                                 key.replace(' ', '_'),
                                 value, now)
             for key, value in things.items()
             if isinstance(value, (float, int))]
    if not lines:
        return
    _LOGGER.debug('Sending to graphite: %s', lines)
    try:
        self._send_to_graphite('\n'.join(lines))
    except socket.error:
        _LOGGER.exception('Failed to send data to graphite')
def shiftr_event_listener(event):
    """Listen for new messages on the bus and sends them to Shiftr.io."""
    state = event.data.get('new_state')
    # Guard against events with no new state (e.g. entity removal):
    # without this, state.entity_id raised AttributeError. Every other
    # state-change listener in this codebase performs the same check.
    if state is None:
        return
    topic = state.entity_id.replace('.', '/')
    try:
        # Prefer a numeric value; fall back to the raw state string.
        _state = state_helper.state_as_number(state)
    except ValueError:
        _state = state.state
    try:
        mqttc.publish(topic, _state, qos=0, retain=False)
        # Publish each attribute on its own sub-topic.
        if state.attributes:
            for attribute, data in state.attributes.items():
                mqttc.publish(
                    '/{}/{}'.format(topic, attribute), str(data),
                    qos=0, retain=False)
    except RuntimeError:
        # Best-effort publish; client errors are silently ignored.
        pass
def event_to_json(event: Dict) -> str:
    """Convert event into json in format Influx expects."""
    state = event.data.get(EVENT_NEW_STATE)
    # Drop removed, unknown/unavailable or filtered-out entities.
    if (
        state is None
        or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
        or not entity_filter(state.entity_id)
    ):
        return

    try:
        _include_state = _include_value = False
        # Numeric-looking states produce a float "value" field; otherwise
        # try the symbolic-to-number mapping, and as a last resort keep
        # only the raw "state" field.
        _state_as_value = float(state.state)
        _include_value = True
    except ValueError:
        try:
            _state_as_value = float(state_helper.state_as_number(state))
            _include_state = _include_value = True
        except ValueError:
            _include_state = True

    include_uom = True
    entity_config = component_config.get(state.entity_id)
    # Measurement name precedence: per-entity override, global override,
    # unit of measurement, global default, entity id.
    measurement = entity_config.get(CONF_OVERRIDE_MEASUREMENT)
    if measurement in (None, ""):
        if override_measurement:
            measurement = override_measurement
        else:
            measurement = state.attributes.get(CONF_UNIT_OF_MEASUREMENT)
            if measurement in (None, ""):
                if default_measurement:
                    measurement = default_measurement
                else:
                    measurement = state.entity_id
    else:
        # The unit already named the measurement; don't duplicate it
        # as a field.
        include_uom = False

    json = {
        INFLUX_CONF_MEASUREMENT: measurement,
        INFLUX_CONF_TAGS: {
            CONF_DOMAIN: state.domain,
            CONF_ENTITY_ID: state.object_id,
        },
        INFLUX_CONF_TIME: event.time_fired,
        INFLUX_CONF_FIELDS: {},
    }
    if _include_state:
        json[INFLUX_CONF_FIELDS][INFLUX_CONF_STATE] = state.state
    if _include_value:
        json[INFLUX_CONF_FIELDS][INFLUX_CONF_VALUE] = _state_as_value

    ignore_attributes = set(entity_config.get(CONF_IGNORE_ATTRIBUTES, []))
    ignore_attributes.update(global_ignore_attributes)
    for key, value in state.attributes.items():
        if key in tags_attributes:
            json[INFLUX_CONF_TAGS][key] = value
        elif (
            key != CONF_UNIT_OF_MEASUREMENT or include_uom
        ) and key not in ignore_attributes:
            # If the key is already in fields
            if key in json[INFLUX_CONF_FIELDS]:
                key = f"{key}_"
            # Prevent column data errors in influxDB.
            # For each value we try to cast it as float
            # But if we can not do it we store the value
            # as string add "_str" postfix to the field key
            try:
                json[INFLUX_CONF_FIELDS][key] = float(value)
            except (ValueError, TypeError):
                new_key = f"{key}_str"
                new_value = str(value)
                json[INFLUX_CONF_FIELDS][new_key] = new_value
                # Values like "12.3 dBm" additionally get a numeric field
                # parsed from the leading digits.
                if RE_DIGIT_TAIL.match(new_value):
                    json[INFLUX_CONF_FIELDS][key] = float(
                        RE_DECIMAL.sub("", new_value)
                    )
            # Infinity and NaN are not valid floats in InfluxDB
            try:
                if not math.isfinite(json[INFLUX_CONF_FIELDS][key]):
                    del json[INFLUX_CONF_FIELDS][key]
            except (KeyError, TypeError):
                pass

    json[INFLUX_CONF_TAGS].update(tags)

    return json
def _state_to_bulk_action(self, state: StateType, time):
    """Creates a bulk action from the given state object"""
    try:
        _state = state_helper.state_as_number(state)
        # Reject NaN/inf-style values; keep the raw state instead.
        if not is_valid_number(_state):
            _state = state.state
    except ValueError:
        _state = state.state

    # Normalize naive timestamps to UTC for the @timestamp field.
    if time.tzinfo is None:
        time_tz = time.astimezone(utc)
    else:
        time_tz = time

    orig_attributes = dict(state.attributes)
    attributes = dict()
    for orig_key, orig_value in orig_attributes.items():
        # ES will attempt to expand any attribute keys which contain a ".",
        # so we replace them with an "_" instead.
        # https://github.com/legrego/homeassistant-elasticsearch/issues/92
        key = str.replace(orig_key, ".", "_")
        value = orig_value

        # Skip any attributes with empty keys. Elasticsearch cannot index these.
        # https://github.com/legrego/homeassistant-elasticsearch/issues/96
        if not key:
            LOGGER.warning(
                "Not publishing keyless attribute from entity [%s].",
                state.entity_id,
            )
            continue

        # coerce set to list. ES does not handle sets natively
        if isinstance(orig_value, set):
            value = list(orig_value)

        # if the list/tuple contains simple strings, numbers, or booleans,
        # then we should index the contents as an actual list. Otherwise,
        # we need to serialize the contents so that we can respect the
        # index mapping (Arrays of objects cannot be indexed as-is)
        if value and isinstance(value, (list, tuple)):
            should_serialize = isinstance(value[0], (tuple, dict, set, list))
        else:
            should_serialize = isinstance(value, dict)

        attributes[key] = (self._serializer.dumps(value)
                           if should_serialize else value)

    document_body = {
        "hass.domain": state.domain,
        "hass.object_id": state.object_id,
        "hass.object_id_lower": state.object_id.lower(),
        "hass.entity_id": state.entity_id,
        "hass.entity_id_lower": state.entity_id.lower(),
        "hass.attributes": attributes,
        "hass.value": _state,
        "@timestamp": time_tz,
    }

    if self._static_doc_properties is None:
        LOGGER.warning(
            "Event for entity [%s] is missing static doc properties. "
            "This is a bug.",
            state.entity_id,
        )
    else:
        document_body.update(self._static_doc_properties)

    # Promote latitude/longitude attributes into an ES geo_point field.
    if ("latitude" in document_body["hass.attributes"]
            and "longitude" in document_body["hass.attributes"]):
        document_body["hass.geo.location"] = {
            "lat": document_body["hass.attributes"]["latitude"],
            "lon": document_body["hass.attributes"]["longitude"],
        }

    return {
        "_op_type": "index",
        "_index": self._index_alias,
        "_source": document_body,
        # If we aren't writing to an alias, that means the
        # Index Template likely wasn't created properly, and we should bail.
        "require_alias": True,
    }
def test_as_number_invalid_cases(self):
    """Test state_as_number with invalid cases."""
    # None of these inputs have a numeric interpretation.
    invalid_inputs = ('', 'foo', 'foo.bar', None,
                      False, True, object, object())
    for bad_value in invalid_inputs:
        with pytest.raises(ValueError):
            state.state_as_number(ha.State('domain.test', bad_value, {}))
def _state_to_bulk_action(self, state, time):
    """Creates a bulk action from the given state object"""
    try:
        _state = state_helper.state_as_number(state)
        # Reject NaN/inf-style values; keep the raw state instead.
        if not is_valid_number(_state):
            _state = state.state
    except ValueError:
        _state = state.state

    # Normalize naive timestamps to UTC for the @timestamp field.
    if time.tzinfo is None:
        time_tz = time.astimezone(utc)
    else:
        time_tz = time

    orig_attributes = dict(state.attributes)
    attributes = dict()
    for orig_key, orig_value in orig_attributes.items():
        # ES will attempt to expand any attribute keys which contain a ".",
        # so we replace them with an "_" instead.
        # https://github.com/legrego/homeassistant-elasticsearch/issues/92
        key = str.replace(orig_key, ".", "_")
        value = orig_value

        # Skip any attributes with empty keys. Elasticsearch cannot index these.
        # https://github.com/legrego/homeassistant-elasticsearch/issues/96
        if not key:
            LOGGER.warning(
                "Not publishing keyless attribute from entity [%s].",
                state.entity_id)
            continue

        # coerce set to list. ES does not handle sets natively
        if isinstance(orig_value, set):
            value = list(orig_value)

        # if the list/tuple contains simple strings, numbers, or booleans,
        # then we should index the contents as an actual list. Otherwise,
        # we need to serialize the contents so that we can respect the
        # index mapping (Arrays of objects cannot be indexed as-is)
        if value and isinstance(value, (list, tuple)):
            should_serialize = isinstance(value[0], (tuple, dict, set, list))
        else:
            should_serialize = isinstance(value, dict)

        attributes[key] = self._serializer.dumps(
            value) if should_serialize else value

    document_body = {
        'hass.domain': state.domain,
        'hass.object_id': state.object_id,
        'hass.object_id_lower': state.object_id.lower(),
        'hass.entity_id': state.entity_id,
        'hass.entity_id_lower': state.entity_id.lower(),
        'hass.attributes': attributes,
        'hass.value': _state,
        '@timestamp': time_tz
    }

    document_body.update(self._static_doc_properties)

    # Promote latitude/longitude attributes into an ES geo_point field.
    if ('latitude' in document_body['hass.attributes']
            and 'longitude' in document_body['hass.attributes']):
        document_body['hass.geo.location'] = {
            'lat': document_body['hass.attributes']['latitude'],
            'lon': document_body['hass.attributes']['longitude']
        }

    es_version = self._gateway.es_version
    if es_version.major == 6:
        # ES 6.x still requires an explicit document "_type".
        return {
            "_op_type": "index",
            "_index": self._index_alias,
            "_type": "doc",
            "_source": document_body
        }
    return {
        "_op_type": "index",
        "_index": self._index_alias,
        "_source": document_body
    }
def run(script_args: List) -> int:
    """Run the actual script."""
    # Lazy imports: these heavy dependencies are only needed when the
    # import script actually runs.
    from sqlalchemy import create_engine
    from sqlalchemy import func
    from sqlalchemy.orm import sessionmaker
    from influxdb import InfluxDBClient
    from homeassistant.components.recorder import models
    from homeassistant.helpers import state as state_helper
    from homeassistant.core import State
    from homeassistant.core import HomeAssistantError

    # --- Command-line interface ------------------------------------
    parser = argparse.ArgumentParser(description="import data to influxDB.")
    parser.add_argument(
        '-c', '--config', metavar='path_to_config_dir',
        default=config_util.get_default_config_dir(),
        help="Directory that contains the Home Assistant configuration")
    parser.add_argument(
        '--uri', type=str,
        help="Connect to URI and import (if other than default sqlite) "
             "eg: mysql://localhost/homeassistant")
    parser.add_argument(
        '-d', '--dbname', metavar='dbname', required=True,
        help="InfluxDB database name")
    parser.add_argument(
        '-H', '--host', metavar='host', default='127.0.0.1',
        help="InfluxDB host address")
    parser.add_argument(
        '-P', '--port', metavar='port', default=8086,
        help="InfluxDB host port")
    parser.add_argument(
        '-u', '--username', metavar='username', default='root',
        help="InfluxDB username")
    parser.add_argument(
        '-p', '--password', metavar='password', default='root',
        help="InfluxDB password")
    parser.add_argument(
        '-s', '--step', metavar='step', default=1000,
        help="How many points to import at the same time")
    parser.add_argument(
        '-t', '--tags', metavar='tags', default="",
        help="Comma separated list of tags (key:value) for all points")
    parser.add_argument(
        '-D', '--default-measurement', metavar='default_measurement',
        default="",
        help="Store all your points in the same measurement")
    parser.add_argument(
        '-o', '--override-measurement', metavar='override_measurement',
        default="",
        help="Store all your points in the same measurement")
    parser.add_argument(
        '-e', '--exclude_entities', metavar='exclude_entities',
        default="",
        help="Comma separated list of excluded entities")
    parser.add_argument(
        '-E', '--exclude_domains', metavar='exclude_domains',
        default="",
        help="Comma separated list of excluded domains")
    parser.add_argument(
        "-S", "--simulate", default=False, action="store_true",
        help=("Do not write points but simulate preprocessing and print "
              "statistics"))
    parser.add_argument(
        '--script', choices=['influxdb_import'])

    args = parser.parse_args()
    simulate = args.simulate

    client = None
    if not simulate:
        client = InfluxDBClient(
            args.host, args.port, args.username, args.password)
        client.switch_database(args.dbname)

    config_dir = os.path.join(os.getcwd(), args.config)  # type: str

    # Test if configuration directory exists
    if not os.path.isdir(config_dir):
        if config_dir != config_util.get_default_config_dir():
            print(('Fatal Error: Specified configuration directory does '
                   'not exist {} ').format(config_dir))
            return 1

    src_db = '{}/home-assistant_v2.db'.format(config_dir)

    if not os.path.exists(src_db) and not args.uri:
        print("Fatal Error: Database '{}' does not exist "
              "and no URI given".format(src_db))
        return 1

    uri = args.uri or 'sqlite:///{}'.format(src_db)
    engine = create_engine(uri, echo=False)
    session_factory = sessionmaker(bind=engine)
    session = session_factory()

    step = int(args.step)
    step_start = 0

    tags = {}
    if args.tags:
        tags.update(dict(elem.split(':') for elem in args.tags.split(',')))
    excl_entities = args.exclude_entities.split(',')
    excl_domains = args.exclude_domains.split(',')
    override_measurement = args.override_measurement
    default_measurement = args.default_measurement

    # Count the total number of state_changed events for the progress bar.
    # pylint: disable=assignment-from-no-return
    query = session.query(func.count(models.Events.event_type)).filter(
        models.Events.event_type == 'state_changed')
    total_events = query.scalar()
    prefix_format = '{} of {}'

    points = []
    invalid_points = []
    count = 0
    from collections import defaultdict
    entities = defaultdict(int)
    print_progress(0, total_events, prefix_format.format(0, total_events))

    # Process the recorder database in slices of `step` events.
    while True:
        step_stop = step_start + step
        if step_start > total_events:
            print_progress(total_events, total_events,
                           prefix_format.format(total_events, total_events))
            break
        query = session.query(models.Events).filter(
            models.Events.event_type == 'state_changed').order_by(
                models.Events.time_fired).slice(step_start, step_stop)

        for event in query:
            event_data = json.loads(event.event_data)

            # Skip excluded entities/domains and malformed events.
            if not ('entity_id' in event_data) or (
                    excl_entities and event_data[
                        'entity_id'] in excl_entities) or (
                            excl_domains and event_data[
                                'entity_id'].split('.')[0] in excl_domains):
                session.expunge(event)
                continue

            try:
                state = State.from_dict(event_data.get('new_state'))
            except HomeAssistantError:
                invalid_points.append(event_data)

            if not state:
                invalid_points.append(event_data)
                continue

            try:
                # Numeric states go to a "value" field, others to "state".
                _state = float(state_helper.state_as_number(state))
                _state_key = 'value'
            except ValueError:
                _state = state.state
                _state_key = 'state'

            # Measurement precedence: override, unit, default, entity id.
            if override_measurement:
                measurement = override_measurement
            else:
                measurement = state.attributes.get('unit_of_measurement')
                if measurement in (None, ''):
                    if default_measurement:
                        measurement = default_measurement
                    else:
                        measurement = state.entity_id

            point = {
                'measurement': measurement,
                'tags': {
                    'domain': state.domain,
                    'entity_id': state.object_id,
                },
                'time': event.time_fired,
                'fields': {
                    _state_key: _state,
                }
            }

            for key, value in state.attributes.items():
                if key != 'unit_of_measurement':
                    # If the key is already in fields
                    if key in point['fields']:
                        key = key + '_'
                    # Prevent column data errors in influxDB.
                    # For each value we try to cast it as float
                    # But if we can not do it we store the value
                    # as string add "_str" postfix to the field key
                    try:
                        point['fields'][key] = float(value)
                    except (ValueError, TypeError):
                        new_key = '{}_str'.format(key)
                        point['fields'][new_key] = str(value)

            entities[state.entity_id] += 1
            point['tags'].update(tags)
            points.append(point)
            session.expunge(event)

        if points:
            if not simulate:
                client.write_points(points)
            count += len(points)
            # This prevents the progress bar from going over 100% when
            # the last step happens
            print_progress((step_start + len(points)), total_events,
                           prefix_format.format(step_start, total_events))
        else:
            print_progress((step_start + step), total_events,
                           prefix_format.format(step_start, total_events))

        points = []
        step_start += step

    print("\nStatistics:")
    print("\n".join([
        "{:6}: {}".format(v, k) for k, v in sorted(
            entities.items(), key=lambda x: x[1])]))
    print("\nInvalid Points: {}".format(len(invalid_points)))
    print("\nImport finished: {} points written".format(count))
    return 0
async def test_as_number_coercion(hass):
    """Test state_as_number with number."""
    # Each group of equivalent representations must coerce to one float.
    expectations = (
        (0.0, ("0", "0.0", 0, 0.0)),
        (1.0, ("1", "1.0", 1, 1.0)),
    )
    for expected, raw_values in expectations:
        for raw in raw_values:
            result = state.state_as_number(ha.State("domain.test", raw, {}))
            assert result == expected
def influx_event_listener(event):
    """Listen for new messages on the bus and sends them to Influx."""
    state = event.data.get('new_state')

    # Drop events that carry no reportable state or are filtered out
    # by the configured black-/whitelists.
    if state is None:
        return
    if state.state in (STATE_UNKNOWN, '', STATE_UNAVAILABLE):
        return
    if state.entity_id in blacklist_e or state.domain in blacklist_d:
        return
    if whitelist_e and state.entity_id not in whitelist_e:
        return
    if whitelist_d and state.domain not in whitelist_d:
        return

    # Store a numeric "value" field when the state maps to a number,
    # otherwise fall back to the raw string in a "state" field.
    try:
        _state_key = "value"
        _state = float(state_helper.state_as_number(state))
    except ValueError:
        _state_key = "state"
        _state = state.state

    if override_measurement:
        measurement = override_measurement
    else:
        measurement = state.attributes.get('unit_of_measurement')
        if measurement in (None, ''):
            measurement = default_measurement or state.entity_id

    point = {
        'measurement': measurement,
        'tags': {
            'domain': state.domain,
            'entity_id': state.object_id,
        },
        'time': event.time_fired,
        'fields': {
            _state_key: _state,
        },
    }

    fields = point['fields']
    for attr_key, attr_value in state.attributes.items():
        if attr_key == 'unit_of_measurement':
            continue
        # Suffix with "_" so an attribute cannot clobber the state column.
        field_key = attr_key + "_" if attr_key in fields else attr_key
        # Cast each attribute to float when possible; otherwise store it
        # as a string under a "_str"-suffixed key to prevent column data
        # errors in InfluxDB.
        try:
            fields[field_key] = float(attr_value)
        except (ValueError, TypeError):
            fields["{}_str".format(field_key)] = str(attr_value)

    point['tags'].update(tags)

    json_body = [point]
    try:
        influx.write_points(json_body)
    except exceptions.InfluxDBClientError:
        _LOGGER.exception("Error saving event %s to InfluxDB", json_body)
def influx_event_listener(event):
    """Listen for new messages on the bus and sends them to Influx.

    Skips events without a usable new state, blacklisted entities/domains,
    and (when whitelists are configured) non-whitelisted entities.
    """
    state = event.data.get('new_state')
    if state is None or state.state in (
            STATE_UNKNOWN, '', STATE_UNAVAILABLE) or \
            state.entity_id in blacklist_e or \
            state.domain in blacklist_d:
        return

    try:
        if (whitelist_e and state.entity_id not in whitelist_e) or \
                (whitelist_d and state.domain not in whitelist_d):
            return

        _include_state = _include_value = False

        # Prefer the raw state when it is already numeric; a ValueError
        # here falls through to the state_as_number mapping below.
        _state_as_value = float(state.state)
        _include_value = True
    except ValueError:
        try:
            # Map well-known states (e.g. on/off) to a number; when both
            # conversions fail, only the raw string state is recorded.
            _state_as_value = float(state_helper.state_as_number(state))
            _include_state = _include_value = True
        except ValueError:
            _include_state = True

    include_uom = True
    # Measurement precedence: per-entity override, then global override,
    # then unit_of_measurement attribute, then default, then entity_id.
    measurement = component_config.get(
        state.entity_id).get(CONF_OVERRIDE_MEASUREMENT)
    if measurement in (None, ''):
        if override_measurement:
            measurement = override_measurement
        else:
            measurement = state.attributes.get('unit_of_measurement')
            if measurement in (None, ''):
                if default_measurement:
                    measurement = default_measurement
                else:
                    measurement = state.entity_id
    else:
        # Per-entity override in effect: keep unit_of_measurement as a
        # regular field instead of using it as the measurement name.
        include_uom = False

    json_body = [{
        'measurement': measurement,
        'tags': {
            'domain': state.domain,
            'entity_id': state.object_id,
        },
        'time': event.time_fired,
        'fields': {}
    }]

    if _include_state:
        json_body[0]['fields']['state'] = state.state
    if _include_value:
        json_body[0]['fields']['value'] = _state_as_value

    for key, value in state.attributes.items():
        if key in tags_attributes:
            # Configured attributes are promoted to InfluxDB tags.
            json_body[0]['tags'][key] = value
        elif key != 'unit_of_measurement' or include_uom:
            # If the key is already in fields
            if key in json_body[0]['fields']:
                key = key + "_"
            # Prevent column data errors in influxDB.
            # For each value we try to cast it as float
            # But if we can not do it we store the value
            # as string add "_str" postfix to the field key
            try:
                json_body[0]['fields'][key] = float(value)
            except (ValueError, TypeError):
                new_key = "{}_str".format(key)
                new_value = str(value)
                json_body[0]['fields'][new_key] = new_value
                # NOTE(review): RE_DIGIT_TAIL/RE_DECIMAL are defined
                # elsewhere; this appears to additionally extract a numeric
                # field from strings ending in digits — confirm the regexes.
                if RE_DIGIT_TAIL.match(new_value):
                    json_body[0]['fields'][key] = float(
                        RE_DECIMAL.sub('', new_value))

    json_body[0]['tags'].update(tags)

    _write_data(json_body)
def event_to_json(event):
    """Convert a state-changed event to an InfluxDB point, or None to skip.

    Events without a usable new state, blacklisted entities/domains, and
    (when whitelists are configured) non-whitelisted entities are dropped.
    """
    state = event.data.get("new_state")
    if (state is None or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
            or state.entity_id in blacklist_e or state.domain in blacklist_d):
        return

    try:
        if ((whitelist_e or whitelist_d)
                and state.entity_id not in whitelist_e
                and state.domain not in whitelist_d):
            return

        _include_state = _include_value = False

        # Prefer the raw state when it is already numeric.
        _state_as_value = float(state.state)
        # Infinity and NaN are not valid floats in InfluxDB; treat them
        # like any other non-numeric state so only the string form is kept.
        if not math.isfinite(_state_as_value):
            raise ValueError
        _include_value = True
    except ValueError:
        try:
            # Map well-known states (e.g. on/off) to a number; when both
            # conversions fail, only the raw string state is recorded.
            _state_as_value = float(state_helper.state_as_number(state))
            if not math.isfinite(_state_as_value):
                raise ValueError
            _include_state = _include_value = True
        except ValueError:
            _include_state = True

    include_uom = True
    # Measurement precedence: per-entity override, then global override,
    # then unit_of_measurement attribute, then default, then entity_id.
    measurement = component_config.get(
        state.entity_id).get(CONF_OVERRIDE_MEASUREMENT)
    if measurement in (None, ""):
        if override_measurement:
            measurement = override_measurement
        else:
            measurement = state.attributes.get("unit_of_measurement")
            if measurement in (None, ""):
                if default_measurement:
                    measurement = default_measurement
                else:
                    measurement = state.entity_id
    else:
        # Per-entity override in effect: keep unit_of_measurement as a
        # regular field instead of using it as the measurement name.
        include_uom = False

    json = {
        "measurement": measurement,
        "tags": {
            "domain": state.domain,
            "entity_id": state.object_id
        },
        "time": event.time_fired,
        "fields": {},
    }
    if _include_state:
        json["fields"]["state"] = state.state
    if _include_value:
        json["fields"]["value"] = _state_as_value

    for key, value in state.attributes.items():
        if key in tags_attributes:
            # Configured attributes are promoted to InfluxDB tags.
            json["tags"][key] = value
        elif key != "unit_of_measurement" or include_uom:
            # If the key is already in fields
            if key in json["fields"]:
                key = f"{key}_"
            # Prevent column data errors in influxDB.
            # For each value we try to cast it as float
            # But if we can not do it we store the value
            # as string add "_str" postfix to the field key
            try:
                json["fields"][key] = float(value)
            except (ValueError, TypeError):
                new_key = f"{key}_str"
                new_value = str(value)
                json["fields"][new_key] = new_value
                # NOTE(review): RE_DIGIT_TAIL/RE_DECIMAL are defined
                # elsewhere; this appears to also extract a numeric field
                # from strings ending in digits — confirm the regexes.
                if RE_DIGIT_TAIL.match(new_value):
                    json["fields"][key] = float(
                        RE_DECIMAL.sub("", new_value))

            # Infinity and NaN are not valid floats in InfluxDB
            try:
                if not math.isfinite(json["fields"][key]):
                    del json["fields"][key]
            except (KeyError, TypeError):
                pass

    json["tags"].update(tags)

    return json
async def test_as_number_invalid_cases(hass):
    """Test state_as_number with invalid cases."""
    invalid_inputs = ("", "foo", "foo.bar", None, False, True, object, object())
    for bad in invalid_inputs:
        # Non-numeric, non-mappable states must raise ValueError.
        with pytest.raises(ValueError):
            state.state_as_number(ha.State("domain.test", bad, {}))