Example #1
async def test_logbook_humanify_automation_triggered_event(hass):
    """Test humanifying Automation Trigger event."""
    await async_setup_component(hass, automation.DOMAIN, {})

    event1, event2 = list(
        logbook.humanify(
            hass,
            [
                Event(
                    EVENT_AUTOMATION_TRIGGERED,
                    {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"},
                ),
                Event(
                    EVENT_AUTOMATION_TRIGGERED,
                    {ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation"},
                ),
            ],
        )
    )

    assert event1["name"] == "Hello Automation"
    assert event1["domain"] == "automation"
    assert event1["message"] == "has been triggered"
    assert event1["entity_id"] == "automation.hello"

    assert event2["name"] == "Bye Automation"
    assert event2["domain"] == "automation"
    assert event2["message"] == "has been triggered"
    assert event2["entity_id"] == "automation.bye"
Example #2
    def test_homekit_pyhap_interaction(self, mock_get_accessory,
                                       mock_import_types, mock_driver_start,
                                       mock_driver_stop, mock_file_persist):
        """Test the interaction between the homekit class and pyhap."""
        acc1 = TemperatureSensor(self.hass, 'sensor.temp', 'Temperature')
        acc2 = Window(self.hass, 'cover.hall_window', 'Cover')
        mock_get_accessory.side_effect = [acc1, acc2]

        homekit = Homekit(self.hass, 51826)
        homekit.setup_bridge(b'123-45-678')

        self.assertEqual(homekit.bridge.display_name, BRIDGE_NAME)

        self.hass.states.set('demo.demo1', 'on')
        self.hass.states.set('demo.demo2', 'off')

        self.hass.start()
        self.hass.block_till_done()

        homekit.start_driver(Event(EVENT_HOMEASSISTANT_START))

        self.assertEqual(mock_get_accessory.call_count, 2)
        self.assertEqual(mock_import_types.call_count, 1)
        self.assertEqual(mock_driver_start.call_count, 1)

        accessories = homekit.bridge.accessories
        self.assertEqual(accessories[2], acc1)
        self.assertEqual(accessories[3], acc2)

        mock_driver_stop.assert_not_called()

        self.hass.bus.fire(EVENT_HOMEASSISTANT_STOP)
        self.hass.block_till_done()

        self.assertEqual(mock_driver_stop.call_count, 1)
Example #3
    def test_homekit_class(self, mock_acc_driver):
        """Test interaction between the HomeKit class and pyhap."""
        with patch(PATH_HOMEKIT + '.accessories.HomeBridge') as mock_bridge:
            homekit = HomeKit(self.hass, 51826)
            homekit.setup_bridge(b'123-45-678')

        mock_bridge.reset_mock()
        self.hass.states.set('demo.demo1', 'on')
        self.hass.states.set('demo.demo2', 'off')

        with patch(PATH_HOMEKIT + '.get_accessory') as mock_get_acc, \
            patch(PATH_HOMEKIT + '.import_types') as mock_import_types, \
                patch('homeassistant.util.get_local_ip') as mock_ip:
            mock_get_acc.side_effect = ['TempSensor', 'Window']
            mock_ip.return_value = IP_ADDRESS
            homekit.start_driver(Event(EVENT_HOMEASSISTANT_START))

        path = self.hass.config.path(HOMEKIT_FILE)

        self.assertEqual(mock_import_types.call_count, 1)
        self.assertEqual(mock_get_acc.call_count, 2)
        self.assertEqual(mock_bridge.mock_calls, [
            call().add_accessory('TempSensor'),
            call().add_accessory('Window')
        ])
        self.assertEqual(
            mock_acc_driver.mock_calls,
            [call(homekit.bridge, 51826, IP_ADDRESS, path),
             call().start()])
        mock_acc_driver.reset_mock()

        self.hass.bus.fire(EVENT_HOMEASSISTANT_STOP)
        self.hass.block_till_done()

        self.assertEqual(mock_acc_driver.mock_calls, [call().stop()])
Example #4
    async def handle_event(self, event):
        """Handle incoming event for motion detection and availability."""

        try:
            self._available = event.data["available"]
            return
        except KeyError:
            pass

        if not self._available:
            return

        try:
            self._last_event_state = bool(self._event_state)
            self._event_state = event.data["motion"]
        except KeyError:
            return

        try:
            await self._base.api.get_all_motion_states()
            self._event_state = self._base.api.motion_state
        except Exception:
            _LOGGER.error("Motion states could not be queried from API")
            _LOGGER.error(traceback.format_exc())
            return

        if self._event_state:
            self._last_motion = datetime.datetime.now()
        else:
            if self._base.motion_off_delay > 0:
                await asyncio.sleep(self._base.motion_off_delay)

        if self._base.api.ai_state:
            # send an event to AI based motion sensor entities
            if self._base.sensor_person_detection is not None:
                await self._base.sensor_person_detection.handle_event(
                    Event(self._base.event_id, {"ai_refreshed": True}))
            if self._base.sensor_vehicle_detection is not None:
                await self._base.sensor_vehicle_detection.handle_event(
                    Event(self._base.event_id, {"ai_refreshed": True}))
            if self._base.sensor_pet_detection is not None:
                await self._base.sensor_pet_detection.handle_event(
                    Event(self._base.event_id, {"ai_refreshed": True}))

        if self.enabled:
            self.async_schedule_update_ha_state()
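
For context, a sketch of the producer side this handler expects; the payload keys ("available", "motion") and base.event_id come from the handler above, while hass and base are assumed to already be in scope:

# Announce availability first; handle_event() stores it and returns early.
hass.bus.async_fire(base.event_id, {"available": True})
# Then report motion; handle_event() reads event.data["motion"].
hass.bus.async_fire(base.event_id, {"motion": True})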
Example #5
async def async_update_motion_states():
    """Perform motion state updates in case webhooks are not functional."""
    # _LOGGER.debug("Refreshing motion states for camera ({}/{})".format(base.name, base.api.host))
    async with async_timeout.timeout(base.timeout):
        # Force a refresh of motion sensors (in case Webhook is broken)
        if base.sensor_motion_detection is not None:
            # hass.bus.async_fire(base.event_id, {"motion": False})
            await base.sensor_motion_detection.handle_event(
                Event(base.event_id, {"motion": True}))
Example #6
def _bucket_updated(self, event: Event):
    """Receive the bucket updated event."""
    # update the sensor status.
    event_dict = event.as_dict()
    self.bucket = float(event_dict["data"][CONF_BUCKET])
    _LOGGER.info(
        "_bucket_updated, received bucket value %s from event_dict: %s",
        self.bucket, event_dict)
    self.update_adjusted_run_time_from_event()
Example #7
def row_to_event(row):
    """ Convert a databse row to an event. """
    try:
        return Event(row[1], json.loads(row[2]), EventOrigin(row[3]),
                     dt_util.utc_from_timestamp(row[5]))
    except ValueError:
        # When json.loads fails
        _LOGGER.exception("Error converting row to event: %s", row)
        return None
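
A quick sketch of feeding row_to_event a hand-built tuple; the column layout (row[1] event type, row[2] JSON data, row[3] origin, row[5] UTC timestamp) mirrors what the function indexes, and the values themselves are made up:

# Indices 0 and 4 are never read by row_to_event.
row = (1, "state_changed", '{"entity_id": "light.kitchen"}', "LOCAL", None, 1514764800)
event = row_to_event(row)
if event is not None:
    print(event.event_type, event.data)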
Example #8
def _service_callback(self, service_call):
    self.publish_event(
        Event(
            event_type=EVENT_CALL_SERVICE,
            data={
                ATTR_DOMAIN: service_call.domain,
                ATTR_SERVICE: service_call.service,
                ATTR_SERVICE_DATA: service_call.data or {}
            },
            origin=EventOrigin.remote))
Example #9
def to_native(self):
    """Convert to a native HA Event."""
    try:
        return Event(self.event_type, json.loads(self.event_data),
                     EventOrigin(self.origin),
                     _process_timestamp(self.time_fired))
    except ValueError:
        # When json.loads fails
        _LOGGER.exception("Error converting to event: %s", self)
        return None
Example #10
def test_find_unserializable_data():
    """Find unserializeable data."""
    assert find_paths_unserializable_data(1) == {}
    assert find_paths_unserializable_data([1, 2]) == {}
    assert find_paths_unserializable_data({"something": "yo"}) == {}

    assert find_paths_unserializable_data({"something": set()}) == {
        "$.something": set()
    }
    assert find_paths_unserializable_data({"something": [1, set()]}) == {
        "$.something[1]": set()
    }
    assert find_paths_unserializable_data([1, {
        "bla": set(),
        "blub": set()
    }]) == {
        "$[1].bla": set(),
        "$[1].blub": set(),
    }
    assert find_paths_unserializable_data({("A", ): 1}) == {
                                               "$<key: ('A',)>": ("A", )
                                           }
    assert math.isnan(
        find_paths_unserializable_data(float("nan"),
                                       dump=partial(dumps,
                                                    allow_nan=False))["$"])

    # Test custom encoder + State support.

    class MockJSONEncoder(JSONEncoder):
        """Mock JSON encoder."""
        def default(self, o):
            """Mock JSON encode method."""
            if isinstance(o, datetime):
                return o.isoformat()
            return super().default(o)

    bad_data = object()

    assert (find_paths_unserializable_data(
        [State("mock_domain.mock_entity", "on", {"bad": bad_data})],
        dump=partial(dumps, cls=MockJSONEncoder),
    ) == {
        "$[0](state: mock_domain.mock_entity).attributes.bad": bad_data
    })

    assert (find_paths_unserializable_data(
        [Event("bad_event", {"bad_attribute": bad_data})],
        dump=partial(dumps, cls=MockJSONEncoder),
    ) == {
        "$[0](event: bad_event).data.bad_attribute": bad_data
    })
Example #11
async def test_cleanup_event_notifiers(hass: HomeAssistant) -> None:
    """Test cleanup function clears all event notifiers."""
    domain_data = get_domain_data(hass)
    await domain_data.async_get_event_notifier(EventListenAddr(None, 0, None),
                                               hass)
    await domain_data.async_get_event_notifier(
        EventListenAddr(None, 0, "different"), hass)

    await domain_data.async_cleanup_event_notifiers(
        Event(EVENT_HOMEASSISTANT_STOP))

    assert not domain_data.event_notifiers
    assert not domain_data.event_notifier_refs
Example #12
    async def async_added_to_hass(self):
        """Handle added to Hass."""
        self.async_on_remove(
            async_track_state_change_event(
                self.hass, self._entity_ids, self._async_min_max_sensor_state_listener
            )
        )

        # Replay current state of source entities
        for entity_id in self._entity_ids:
            state = self.hass.states.get(entity_id)
            state_event = Event("", {"entity_id": entity_id, "new_state": state})
            self._async_min_max_sensor_state_listener(state_event, update_state=False)

        self._calc_values()
Example #13
def _state_to_event(new_state, old_state=None):
    if new_state is None:
        return None
    new_state_dict = new_state.as_dict()
    old_state_dict = old_state.as_dict() if old_state is not None else new_state.as_dict()

    return Event(
        event_type=EVENT_STATE_CHANGED,
        data={
            ATTR_ENTITY_ID: new_state.entity_id,
            ATTR_OLD_STATE: _add_state_attributes(old_state_dict, new_state.entity_id),
            ATTR_NEW_STATE: _add_state_attributes(new_state_dict, new_state.entity_id)
        },
        origin=EventOrigin.remote
    )
Example #14
def to_native(self):
    """Convert to a native HA Event."""
    context = Context(id=self.context_id, user_id=self.context_user_id)
    try:
        return Event(
            self.event_type,
            json.loads(self.event_data),
            EventOrigin(self.origin),
            process_timestamp(self.time_fired),
            context=context,
        )
    except ValueError:
        # When json.loads fails
        _LOGGER.exception("Error converting to event: %s", self)
        return None
Example #15
def to_native(self, validate_entity_id: bool = True) -> Event | None:
    """Convert to a native HA Event."""
    context = Context(
        id=self.context_id,
        user_id=self.context_user_id,
        parent_id=self.context_parent_id,
    )
    try:
        return Event(
            self.event_type,
            json_loads(self.event_data) if self.event_data else {},
            EventOrigin(self.origin)
            if self.origin
            else EVENT_ORIGIN_ORDER[self.origin_idx],
            process_timestamp(self.time_fired),
            context=context,
        )
    except JSON_DECODE_EXCEPTIONS:
        # When json_loads fails
        _LOGGER.exception("Error converting to event: %s", self)
        return None
Example #16
def _bucket_updated(self, event: Event):
    """Receive the bucket updated event."""
    # update the sensor status.
    event_dict = event.as_dict()
    self.bucket = float(event_dict["data"][CONF_BUCKET])
    self.update_adjusted_run_time_from_event()
Example #17
def main():
    """
    Connect to both databases and migrate data
    """

    parser = argparse.ArgumentParser()
    parser.add_argument('--user',
                        '-u',
                        dest='user',
                        action='store',
                        required=True,
                        help='MySQL/MariaDB username')
    parser.add_argument('--password',
                        "-p",
                        dest='password',
                        action='store',
                        help='MySQL/MariaDB password')
    parser.add_argument('--host',
                        '-s',
                        dest='host',
                        action='store',
                        required=True,
                        help='MySQL/MariaDB host')
    parser.add_argument('--port',
                        '-o',
                        dest='port',
                        action='store',
                        required=False,
                        type=int,
                        default=3306,
                        help='MySQL/MariaDB port. MySQL 3306 (default), MariaDB 3307')
    parser.add_argument('--database',
                        '-d',
                        dest='database',
                        action='store',
                        required=True,
                        help='MySQL/MariaDB database name')
    parser.add_argument(
        '--count',
        '-c',
        dest='row_count',
        action='store',
        required=False,
        type=int,
        default=0,
        help=
        'If 0 (default), determine upper bound of number of rows by querying database, '
        'otherwise use this number (used for progress bar only)')

    args = parser.parse_args()

    # load InfluxDB configuration file (the one from Home Assistant) (without using !secrets)
    with open("influxdb.yaml") as config_file:
        influx_config = yaml.load(config_file, Loader=yaml.FullLoader)

    # validate and extend config
    schema = vol.Schema(INFLUX_SCHEMA, extra=vol.ALLOW_EXTRA)
    influx_config = schema(influx_config)

    # establish connection to InfluxDB
    influx = get_influx_connection(influx_config,
                                   test_write=True,
                                   test_read=True)
    converter = _generate_event_to_json(influx_config)

    # connect to MySQL/MariaDB database
    connection = mysql_connect(host=args.host,
                               port=args.port,
                               user=args.user,
                               password=args.password,
                               database=args.database,
                               cursorclass=cursors.SSCursor,
                               charset="utf8")
    cursor = connection.cursor()

    # untested: connect to SQLite file instead (you need to get rid of the first three `add_argument` calls above)
    #connection = sqlite3.connect('home_assistant_v2.db')

    if args.row_count == 0:
        # query number of rows in states table - this will be more than the number of rows we
        # are going to process, but at least it gives us some percentage and estimation
        cursor.execute("select COUNT(*) from states")
        total = cursor.fetchone()[0]
    else:
        total = args.row_count

    # select the values we are interested in
    cursor.execute(
        "select states.entity_id, states.state, states.attributes, events.event_type, events.time_fired from states, events where events.event_id = states.event_id"
    )

    # map to count names and number of measurements for each entity
    statistics = {}

    # convert each row, write to influxdb in batches
    batch_size_max = 512
    batch_size_cur = 0
    batch_json = []
    with tqdm(total=total, mininterval=1, unit=" rows",
              unit_scale=True) as progress_bar:
        for row in cursor:
            progress_bar.update(1)

            try:
                _entity_id = rename_entity_id(row[0])
                _state = row[1]
                _attributes_raw = row[2]
                _attributes = rename_friendly_name(json.loads(_attributes_raw))
                _event_type = row[3]
                _time_fired = row[4]
            except Exception as e:
                print("Failed extracting data from %s: %s.\nAttributes: %s" %
                      (row, e, _attributes_raw))
                continue

            try:
                # recreate state and event
                state = State(entity_id=_entity_id,
                              state=_state,
                              attributes=_attributes)
                event = Event(_event_type,
                              data={"new_state": state},
                              time_fired=_time_fired)
            except InvalidEntityFormatError:
                pass
            else:
                data = converter(event)
                if not data:
                    continue

                # collect statistics (remove this code block to speed up processing slightly)
                if "friendly_name" in _attributes:
                    friendly_name = _attributes["friendly_name"]

                    if _entity_id not in statistics:
                        statistics[_entity_id] = {friendly_name: 1}
                    elif friendly_name not in statistics[_entity_id]:
                        statistics[_entity_id][friendly_name] = 1
                        print(
                            "Found new name '%s' for entity '%s'. All names known so far: %s"
                            % (friendly_name, _entity_id,
                               statistics[_entity_id].keys()))
                        print(row)
                    else:
                        statistics[_entity_id][friendly_name] += 1

                batch_json.append(data)
                batch_size_cur += 1

                if batch_size_cur >= batch_size_max:
                    influx.write(batch_json)
                    batch_json = []
                    batch_size_cur = 0

    influx.write(batch_json)
    influx.close()

    # print statistics - ideally you have one friendly name per entity_id
    # you can use the output to see where the same sensor has had different
    # names, as well as which entities do not have lots of measurements and
    # thus could be ignored (add them to exclude/entities in the influxdb yaml)
    for entity in sorted(statistics.keys()):
        print(entity)
        for friendly_name in sorted(statistics[entity].keys()):
            count = statistics[entity][friendly_name]
            print("  - %s (%d)" % (friendly_name, count))