Example #1
    def test_single_retry(self):
        """Test that retry stops after a single try if configured."""
        mock_method = MagicMock()
        retryer = influxdb.RetryOnError(self.hass, retry_limit=1)
        wrapped = retryer(mock_method)
        wrapped(1, 2, test=3)
        self.assertEqual(mock_method.call_count, 1)
        mock_method.assert_called_with(1, 2, test=3)

        start = dt_util.utcnow()
        shifted_time = start + (timedelta(seconds=20 + 1))
        self.hass.bus.fire(ha.EVENT_TIME_CHANGED,
                           {ha.ATTR_NOW: shifted_time})
        self.hass.block_till_done()
        self.assertEqual(mock_method.call_count, 1)

        mock_method.side_effect = Exception()
        wrapped(1, 2, test=3)
        self.assertEqual(mock_method.call_count, 2)
        mock_method.assert_called_with(1, 2, test=3)

        for _ in range(3):
            start = dt_util.utcnow()
            shifted_time = start + (timedelta(seconds=20 + 1))
            self.hass.bus.fire(ha.EVENT_TIME_CHANGED,
                               {ha.ATTR_NOW: shifted_time})
            self.hass.block_till_done()
            self.assertEqual(mock_method.call_count, 3)
            mock_method.assert_called_with(1, 2, test=3)
Example #2
def test_monitored_conditions():
    """Test the filtering of attributes."""
    ctrl = mock.MagicMock()
    fake_clients = [
        {'mac': '123',
         'hostname': 'foobar',
         'essid': 'barnet',
         'signal': -60,
         'last_seen': dt_util.as_timestamp(dt_util.utcnow())},
        {'mac': '234',
         'name': 'Nice Name',
         'essid': 'barnet',
         'signal': -42,
         'last_seen': dt_util.as_timestamp(dt_util.utcnow())},
        {'mac': '456',
         'hostname': 'wired',
         'essid': 'barnet',
         'last_seen': dt_util.as_timestamp(dt_util.utcnow())},
    ]
    ctrl.get_clients.return_value = fake_clients
    scanner = unifi.UnifiScanner(ctrl, DEFAULT_DETECTION_TIME, None,
                                 ['essid', 'signal'])
    assert scanner.get_extra_attributes('123') == {'essid': 'barnet',
                                                   'signal': -60}
    assert scanner.get_extra_attributes('234') == {'essid': 'barnet',
                                                   'signal': -42}
    assert scanner.get_extra_attributes('456') == {'essid': 'barnet'}
Example #3
    def test_stop_covers(self):
        """Test stop cover function."""
        with assert_setup_component(2, DOMAIN):
            assert setup.setup_component(self.hass, DOMAIN, CONFIG)

        cover.open_cover(self.hass, COVER_GROUP)
        self.hass.block_till_done()
        future = dt_util.utcnow() + timedelta(seconds=1)
        fire_time_changed(self.hass, future)
        self.hass.block_till_done()
        cover.stop_cover(self.hass, COVER_GROUP)
        self.hass.block_till_done()
        future = dt_util.utcnow() + timedelta(seconds=1)
        fire_time_changed(self.hass, future)
        self.hass.block_till_done()

        state = self.hass.states.get(COVER_GROUP)
        self.assertEqual(state.state, STATE_OPEN)
        self.assertEqual(state.attributes.get(ATTR_CURRENT_POSITION), 100)

        self.assertEqual(self.hass.states.get(DEMO_COVER).state, STATE_OPEN)
        self.assertEqual(self.hass.states.get(DEMO_COVER_POS)
                         .attributes.get(ATTR_CURRENT_POSITION), 20)
        self.assertEqual(self.hass.states.get(DEMO_COVER_TILT)
                         .attributes.get(ATTR_CURRENT_POSITION), 80)
Example #4
    async def update_image(self, image, filename):
        """Update the camera image."""
        if self._state == STATE_IDLE:
            self._state = STATE_RECORDING
            self._last_trip = dt_util.utcnow()
            self.queue.clear()

        self._filename = filename
        self.queue.appendleft(image)

        @callback
        def reset_state(now):
            """Set state to idle after no new images for a period of time."""
            self._state = STATE_IDLE
            self._expired_listener = None
            _LOGGER.debug("Reset state")
            self.async_schedule_update_ha_state()

        if self._expired_listener:
            self._expired_listener()

        self._expired_listener = async_track_point_in_utc_time(
            self.hass, reset_state, dt_util.utcnow() + self._timeout)

        self.async_schedule_update_ha_state()
Example #5
    @asyncio.coroutine
    def _learn_command(call):
        """Handle a learn command."""
        try:
            auth = yield from hass.loop.run_in_executor(None,
                                                        broadlink_device.auth)
        except socket.timeout:
            _LOGGER.error("Failed to connect to device, timeout.")
            return
        if not auth:
            _LOGGER.error("Failed to connect to device.")
            return

        yield from hass.loop.run_in_executor(None,
                                             broadlink_device.enter_learning)

        _LOGGER.info("Press the key you want HASS to learn")
        start_time = utcnow()
        while (utcnow() - start_time) < timedelta(seconds=20):
            packet = yield from hass.loop.run_in_executor(None,
                                                          broadlink_device.
                                                          check_data)
            if packet:
                log_msg = 'Received packet is: {}'.\
                          format(b64encode(packet).decode('utf8'))
                _LOGGER.info(log_msg)
                persistent_notification.async_create(hass, log_msg,
                                                     title='Broadlink switch')
                return
            yield from asyncio.sleep(1, loop=hass.loop)
        _LOGGER.error('Did not receive any signal.')
        persistent_notification.async_create(hass,
                                             "Did not receive any signal",
                                             title='Broadlink switch')
Example #6
    def test_send_code_delay(self, mock_pilight_error):
        """Try to send proper data with delay afterwards."""
        with assert_setup_component(4):
            self.assertTrue(setup_component(
                self.hass, pilight.DOMAIN,
                {pilight.DOMAIN: {pilight.CONF_SEND_DELAY: 5.0}}))

            # Call with protocol info, should not give error
            service_data1 = {'protocol': 'test11',
                             'value': 42}
            service_data2 = {'protocol': 'test22',
                             'value': 42}
            self.hass.services.call(pilight.DOMAIN, pilight.SERVICE_NAME,
                                    service_data=service_data1,
                                    blocking=True)
            self.hass.services.call(pilight.DOMAIN, pilight.SERVICE_NAME,
                                    service_data=service_data2,
                                    blocking=True)
            service_data1['protocol'] = [service_data1['protocol']]
            service_data2['protocol'] = [service_data2['protocol']]

            self.hass.bus.fire(ha.EVENT_TIME_CHANGED,
                               {ha.ATTR_NOW: dt_util.utcnow()})
            self.hass.block_till_done()
            error_log_call = mock_pilight_error.call_args_list[-1]
            self.assertTrue(str(service_data1) in str(error_log_call))

            new_time = dt_util.utcnow() + timedelta(seconds=5)
            self.hass.bus.fire(ha.EVENT_TIME_CHANGED,
                               {ha.ATTR_NOW: new_time})
            self.hass.block_till_done()
            error_log_call = mock_pilight_error.call_args_list[-1]
            self.assertTrue(str(service_data2) in str(error_log_call))
Example #7
async def test_if_fires_when_hour_matches(hass, calls):
    """Test for firing if hour is matching."""
    assert await async_setup_component(hass, automation.DOMAIN, {
        automation.DOMAIN: {
            'trigger': {
                'platform': 'time_pattern',
                'hours': 0,
                'minutes': '*',
                'seconds': '*',
            },
            'action': {
                'service': 'test.automation'
            }
        }
    })

    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=0))
    await hass.async_block_till_done()
    assert 1 == len(calls)

    await common.async_turn_off(hass)
    await hass.async_block_till_done()

    async_fire_time_changed(hass, dt_util.utcnow().replace(hour=0))
    await hass.async_block_till_done()
    assert 1 == len(calls)
Example #8
    def test_state_changes_during_period(self):
        """Test state changes during a period."""
        self.init_recorder()
        entity_id = "media_player.test"

        def set_state(state):
            self.hass.states.set(entity_id, state)
            self.hass.pool.block_till_done()
            recorder._INSTANCE.block_till_done()

            return self.hass.states.get(entity_id)

        set_state("idle")
        set_state("YouTube")

        start = dt_util.utcnow()

        time.sleep(1)

        states = [set_state("idle"), set_state("Netflix"),
                  set_state("Plex"), set_state("YouTube")]

        time.sleep(1)

        end = dt_util.utcnow()

        set_state("Netflix")
        set_state("Plex")

        self.assertEqual(
            {entity_id: states},
            history.state_changes_during_period(start, end, entity_id))
Example #9
async def test_default_values(hass, calls):
    """Test for firing at 2 minutes every hour."""
    assert await async_setup_component(hass, automation.DOMAIN, {
        automation.DOMAIN: {
            'trigger': {
                'platform': 'time_pattern',
                'minutes': "2",
            },
            'action': {
                'service': 'test.automation'
            }
        }
    })

    async_fire_time_changed(hass, dt_util.utcnow().replace(
        hour=1, minute=2, second=0))

    await hass.async_block_till_done()
    assert 1 == len(calls)

    async_fire_time_changed(hass, dt_util.utcnow().replace(
        hour=1, minute=2, second=1))

    await hass.async_block_till_done()
    assert 1 == len(calls)

    async_fire_time_changed(hass, dt_util.utcnow().replace(
        hour=2, minute=2, second=0))

    await hass.async_block_till_done()
    assert 2 == len(calls)
Example #10
async def test_stop_covers(hass, setup_comp):
    """Test stop cover function."""
    await hass.services.async_call(
        DOMAIN, SERVICE_OPEN_COVER,
        {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True)
    future = dt_util.utcnow() + timedelta(seconds=1)
    async_fire_time_changed(hass, future)
    await hass.async_block_till_done()

    await hass.services.async_call(
        DOMAIN, SERVICE_STOP_COVER,
        {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True)
    future = dt_util.utcnow() + timedelta(seconds=1)
    async_fire_time_changed(hass, future)
    await hass.async_block_till_done()

    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert state.attributes.get(ATTR_CURRENT_POSITION) == 100

    assert hass.states.get(DEMO_COVER).state == STATE_OPEN
    assert hass.states.get(DEMO_COVER_POS) \
        .attributes.get(ATTR_CURRENT_POSITION) == 20
    assert hass.states.get(DEMO_COVER_TILT) \
        .attributes.get(ATTR_CURRENT_POSITION) == 80
Example #11
        async def _check_awaked():
            """Wait for Z-wave awaked state (or timeout) and finalize start."""
            _LOGGER.debug(
                "network state: %d %s", network.state,
                network.state_str)

            start_time = dt_util.utcnow()
            while True:
                waited = int((dt_util.utcnow()-start_time).total_seconds())

                if network.state >= network.STATE_AWAKED:
                    # Need to be in STATE_AWAKED before talking to nodes.
                    _LOGGER.info("Z-Wave ready after %d seconds", waited)
                    break
                elif waited >= const.NETWORK_READY_WAIT_SECS:
                    # Wait up to NETWORK_READY_WAIT_SECS seconds for the Z-Wave
                    # network to be ready.
                    _LOGGER.warning(
                        "Z-Wave not ready after %d seconds, continuing anyway",
                        waited)
                    _LOGGER.info(
                        "final network state: %d %s", network.state,
                        network.state_str)
                    break
                else:
                    await asyncio.sleep(1, loop=hass.loop)

            hass.async_add_job(_finalize_start)
Example #12
    async def _learn_command(call):
        """Handle a learn command."""
        try:
            auth = await hass.async_add_job(broadlink_device.auth)
        except socket.timeout:
            _LOGGER.error("Failed to connect to device, timeout")
            return
        if not auth:
            _LOGGER.error("Failed to connect to device")
            return

        await hass.async_add_job(broadlink_device.enter_learning)

        _LOGGER.info("Press the key you want Home Assistant to learn")
        start_time = utcnow()
        while (utcnow() - start_time) < timedelta(seconds=20):
            packet = await hass.async_add_job(
                broadlink_device.check_data)
            if packet:
                log_msg = "Received packet is: {}".\
                          format(b64encode(packet).decode('utf8'))
                _LOGGER.info(log_msg)
                hass.components.persistent_notification.async_create(
                    log_msg, title='Broadlink switch')
                return
            await asyncio.sleep(1, loop=hass.loop)
        _LOGGER.error("Did not receive any signal")
        hass.components.persistent_notification.async_create(
            "Did not receive any signal", title='Broadlink switch')
Example #13
        async def _learn_command(call):
            """Learn a packet from remote."""
            device = hass.data[DOMAIN][call.data[CONF_HOST]]

            try:
                auth = await hass.async_add_executor_job(device.auth)
            except socket.timeout:
                _LOGGER.error("Failed to connect to device, timeout")
                return
            if not auth:
                _LOGGER.error("Failed to connect to device")
                return

            await hass.async_add_executor_job(device.enter_learning)

            _LOGGER.info("Press the key you want Home Assistant to learn")
            start_time = utcnow()
            while (utcnow() - start_time) < timedelta(seconds=20):
                packet = await hass.async_add_executor_job(
                    device.check_data)
                if packet:
                    data = b64encode(packet).decode('utf8')
                    log_msg = "Received packet is: {}".\
                              format(data)
                    _LOGGER.info(log_msg)
                    hass.components.persistent_notification.async_create(
                        log_msg, title='Broadlink switch')
                    return
                await asyncio.sleep(1)
            _LOGGER.error("No signal was received")
            hass.components.persistent_notification.async_create(
                "No signal was received", title='Broadlink switch')
Example #14
    def async_create_access_token(self, refresh_token):
        """Create a new access token."""
        # pylint: disable=no-self-use
        return jwt.encode({
            'iss': refresh_token.id,
            'iat': dt_util.utcnow(),
            'exp': dt_util.utcnow() + refresh_token.access_token_expiration,
        }, refresh_token.jwt_key, algorithm='HS256').decode()
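
A small companion sketch (not part of the example above) of how a token minted this way could be checked on the other end with PyJWT; validate_access_token, token and jwt_key are illustrative names here, and only jwt.decode and its exception class are real PyJWT API.

import jwt


def validate_access_token(token, jwt_key):
    """Return the decoded claims, or None if the token is invalid or expired."""
    try:
        # PyJWT verifies the HS256 signature and rejects tokens whose
        # 'exp' claim lies in the past.
        return jwt.decode(token, jwt_key, algorithms=['HS256'])
    except jwt.InvalidTokenError:
        return None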
Example #15
@asyncio.coroutine
def test_template_delay_off(hass):
    """Test binary sensor template delay off."""
    config = {
        'binary_sensor': {
            'platform': 'template',
            'sensors': {
                'test': {
                    'friendly_name': 'virtual thingy',
                    'value_template':
                        "{{ states.sensor.test_state.state == 'on' }}",
                    'device_class': 'motion',
                    'delay_off': 5
                },
            },
        },
    }
    hass.states.async_set('sensor.test_state', 'on')
    yield from setup.async_setup_component(hass, 'binary_sensor', config)
    yield from hass.async_start()

    hass.states.async_set('sensor.test_state', 'off')
    yield from hass.async_block_till_done()

    state = hass.states.get('binary_sensor.test')
    assert state.state == 'on'

    future = dt_util.utcnow() + timedelta(seconds=5)
    async_fire_time_changed(hass, future)
    yield from hass.async_block_till_done()

    state = hass.states.get('binary_sensor.test')
    assert state.state == 'off'

    # check with time changes
    hass.states.async_set('sensor.test_state', 'on')
    yield from hass.async_block_till_done()

    state = hass.states.get('binary_sensor.test')
    assert state.state == 'on'

    hass.states.async_set('sensor.test_state', 'off')
    yield from hass.async_block_till_done()

    state = hass.states.get('binary_sensor.test')
    assert state.state == 'on'

    hass.states.async_set('sensor.test_state', 'on')
    yield from hass.async_block_till_done()

    state = hass.states.get('binary_sensor.test')
    assert state.state == 'on'

    future = dt_util.utcnow() + timedelta(seconds=5)
    async_fire_time_changed(hass, future)
    yield from hass.async_block_till_done()

    state = hass.states.get('binary_sensor.test')
    assert state.state == 'on'
Example #16
    def test_trigger_with_specific_pending(self):
        """Test arm home method."""
        self.assertTrue(setup_component(
            self.hass, alarm_control_panel.DOMAIN,
            {'alarm_control_panel': {
                'platform': 'manual_mqtt',
                'name': 'test',
                'pending_time': 10,
                'triggered': {
                    'pending_time': 2
                },
                'trigger_time': 3,
                'disarm_after_trigger': False,
                'command_topic': 'alarm/command',
                'state_topic': 'alarm/state',
            }}))

        entity_id = 'alarm_control_panel.test'

        alarm_control_panel.alarm_arm_home(self.hass)
        self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_PENDING,
                         self.hass.states.get(entity_id).state)

        future = dt_util.utcnow() + timedelta(seconds=10)
        with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.'
                    'dt_util.utcnow'), return_value=future):
            fire_time_changed(self.hass, future)
            self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_ARMED_HOME,
                         self.hass.states.get(entity_id).state)

        alarm_control_panel.alarm_trigger(self.hass)
        self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_PENDING,
                         self.hass.states.get(entity_id).state)

        future = dt_util.utcnow() + timedelta(seconds=2)
        with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.'
                    'dt_util.utcnow'), return_value=future):
            fire_time_changed(self.hass, future)
            self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_TRIGGERED,
                         self.hass.states.get(entity_id).state)

        future = dt_util.utcnow() + timedelta(seconds=5)
        with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.'
                    'dt_util.utcnow'), return_value=future):
            fire_time_changed(self.hass, future)
            self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_ARMED_HOME,
                         self.hass.states.get(entity_id).state)
Example #17
    @asyncio.coroutine
    def async_run(self, variables: Optional[Sequence] = None) -> None:
        """Run script.

        This method is a coroutine.
        """
        self.last_triggered = date_util.utcnow()
        if self._cur == -1:
            self._log('Running script')
            self._cur = 0

        # Unregister callback if we were in a delay but turn on is called
        # again. In that case we just continue execution.
        self._async_remove_listener()

        for cur, action in islice(enumerate(self.sequence), self._cur,
                                  None):

            if CONF_DELAY in action:
                # Call ourselves in the future to continue work
                @asyncio.coroutine
                def script_delay(now):
                    """Called after delay is done."""
                    self._async_unsub_delay_listener = None
                    self.hass.async_add_job(self.async_run(variables))

                delay = action[CONF_DELAY]

                if isinstance(delay, template.Template):
                    delay = vol.All(
                        cv.time_period,
                        cv.positive_timedelta)(
                            delay.async_render(variables))

                self._async_unsub_delay_listener = \
                    async_track_point_in_utc_time(
                        self.hass, script_delay,
                        date_util.utcnow() + delay)
                self._cur = cur + 1
                if self._change_listener:
                    self.hass.async_add_job(self._change_listener)
                return

            elif CONF_CONDITION in action:
                if not self._async_check_condition(action, variables):
                    break

            elif CONF_EVENT in action:
                self._async_fire_event(action)

            else:
                yield from self._async_call_service(action, variables)

        self._cur = -1
        self.last_action = None
        if self._change_listener:
            self.hass.async_add_job(self._change_listener)
Example #18
def test_scan_devices():
    """Test the scanning for devices."""
    ctrl = mock.MagicMock()
    fake_clients = [
        {'mac': '123', 'last_seen': dt_util.as_timestamp(dt_util.utcnow())},
        {'mac': '234', 'last_seen': dt_util.as_timestamp(dt_util.utcnow())},
    ]
    ctrl.get_clients.return_value = fake_clients
    scanner = unifi.UnifiScanner(ctrl, DEFAULT_DETECTION_TIME)
    assert set(scanner.scan_devices()) == set(['123', '234'])
Example #19
    def _check_state_available(self):
        """Set state to unavailable if errors persist for over 1 minute."""
        if not self._first_error_timestamp:
            self._first_error_timestamp = dt_util.utcnow()
        else:
            tdelta = dt_util.utcnow() - self._first_error_timestamp
            if tdelta.total_seconds() >= 60:
                self._available = False

        return self._available
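
The same grace-period idea in a standalone form, as a rough sketch with invented names (GracePeriodTracker, report_error, report_success) rather than Home Assistant code: availability only drops once errors have persisted for a full minute, and a single success clears the window.

from datetime import datetime, timedelta, timezone


class GracePeriodTracker:
    """Mark a resource unavailable only after errors persist for the grace period."""

    def __init__(self, grace=timedelta(seconds=60)):
        self._grace = grace
        self._first_error = None
        self.available = True

    def report_error(self):
        now = datetime.now(timezone.utc)
        if self._first_error is None:
            self._first_error = now
        elif now - self._first_error >= self._grace:
            self.available = False
        return self.available

    def report_success(self):
        # A successful update clears the error window again.
        self._first_error = None
        self.available = True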
Example #20
    def test_max_queue(self):
        """Test the maximum queue length."""
        # make a wrapped method
        mock_method = MagicMock()
        retryer = influxdb.RetryOnError(
            self.hass, retry_limit=4, queue_limit=3)
        wrapped = retryer(mock_method)
        mock_method.side_effect = Exception()

        # call it once, call fails, queue fills to 1
        wrapped(1, 2, test=3)
        self.assertEqual(mock_method.call_count, 1)
        mock_method.assert_called_with(1, 2, test=3)
        self.assertEqual(len(wrapped._retry_queue), 1)

        # two more calls that failed. queue is 3
        wrapped(1, 2, test=3)
        wrapped(1, 2, test=3)
        self.assertEqual(mock_method.call_count, 3)
        self.assertEqual(len(wrapped._retry_queue), 3)

        # another call, queue gets limited to 3
        wrapped(1, 2, test=3)
        self.assertEqual(mock_method.call_count, 4)
        self.assertEqual(len(wrapped._retry_queue), 3)

        # time passes
        start = dt_util.utcnow()
        shifted_time = start + (timedelta(seconds=20 + 1))
        self.hass.bus.fire(ha.EVENT_TIME_CHANGED,
                           {ha.ATTR_NOW: shifted_time})
        self.hass.block_till_done()

        # only the three queued calls were repeated
        self.assertEqual(mock_method.call_count, 7)
        self.assertEqual(len(wrapped._retry_queue), 3)

        # another call, queue stays limited
        wrapped(1, 2, test=3)
        self.assertEqual(mock_method.call_count, 8)
        self.assertEqual(len(wrapped._retry_queue), 3)

        # disable the side effect
        mock_method.side_effect = None

        # time passes, all calls should succeed
        start = dt_util.utcnow()
        shifted_time = start + (timedelta(seconds=20 + 1))
        self.hass.bus.fire(ha.EVENT_TIME_CHANGED,
                           {ha.ATTR_NOW: shifted_time})
        self.hass.block_till_done()

        # three queued calls succeeded, queue empty.
        self.assertEqual(mock_method.call_count, 11)
        self.assertEqual(len(wrapped._retry_queue), 0)
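
For orientation, a much simplified standalone sketch of the behaviour this test exercises; it is not the real influxdb.RetryOnError, just the bounded queue-and-replay idea, with invented names (make_retrying, flush, retry_queue). Calling the wrapper never raises; a later flush() replays the queue, which is roughly what the shifted time-changed events above trigger in the real component.

from collections import deque


def make_retrying(method, queue_limit=3):
    """Wrap method so that failed calls are queued and replayed by flush()."""
    queue = deque(maxlen=queue_limit)

    def wrapped(*args, **kwargs):
        try:
            method(*args, **kwargs)
        except Exception:
            # Keep the failed call; deque(maxlen=...) drops the oldest
            # entry once the queue limit is exceeded.
            queue.append((args, kwargs))

    def flush():
        """Replay the queued calls; calls that fail again are re-queued."""
        for _ in range(len(queue)):
            args, kwargs = queue.popleft()
            wrapped(*args, **kwargs)

    wrapped.flush = flush
    wrapped.retry_queue = queue
    return wrapped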
Example #21
def test_scanner_update():
    """Test the scanner update."""
    ctrl = mock.MagicMock()
    fake_clients = [
        {'mac': '123', 'last_seen': dt_util.as_timestamp(dt_util.utcnow())},
        {'mac': '234', 'last_seen': dt_util.as_timestamp(dt_util.utcnow())},
    ]
    ctrl.get_clients.return_value = fake_clients
    unifi.UnifiScanner(ctrl, DEFAULT_DETECTION_TIME)
    assert ctrl.get_clients.call_count == 1
    assert ctrl.get_clients.call_args == mock.call()
Example #22
    def test_back_to_back_trigger_with_no_disarm_after_trigger(self):
        """Test no disarm after back to back trigger."""
        self.assertTrue(setup_component(
            self.hass, alarm_control_panel.DOMAIN,
            {'alarm_control_panel': {
                'platform': 'manual_mqtt',
                'name': 'test',
                'trigger_time': 5,
                'pending_time': 0,
                'disarm_after_trigger': False,
                'command_topic': 'alarm/command',
                'state_topic': 'alarm/state',
            }}))

        entity_id = 'alarm_control_panel.test'

        self.assertEqual(STATE_ALARM_DISARMED,
                         self.hass.states.get(entity_id).state)

        alarm_control_panel.alarm_arm_away(self.hass, CODE, entity_id)
        self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_ARMED_AWAY,
                         self.hass.states.get(entity_id).state)

        alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id)
        self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_TRIGGERED,
                         self.hass.states.get(entity_id).state)

        future = dt_util.utcnow() + timedelta(seconds=5)
        with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.'
                    'dt_util.utcnow'), return_value=future):
            fire_time_changed(self.hass, future)
            self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_ARMED_AWAY,
                         self.hass.states.get(entity_id).state)

        alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id)
        self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_TRIGGERED,
                         self.hass.states.get(entity_id).state)

        future = dt_util.utcnow() + timedelta(seconds=5)
        with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.'
                    'dt_util.utcnow'), return_value=future):
            fire_time_changed(self.hass, future)
            self.hass.block_till_done()

        self.assertEqual(STATE_ALARM_ARMED_AWAY,
                         self.hass.states.get(entity_id).state)
Example #23
    def pressed():
        """Handle the press of the LiteJet switch's button."""
        nonlocal cancel_pressed_more_than, pressed_time
        nonlocal held_less_than, held_more_than
        pressed_time = dt_util.utcnow()
        if held_more_than is None and held_less_than is None:
            hass.add_job(call_action)
        if held_more_than is not None and held_less_than is None:
            cancel_pressed_more_than = track_point_in_utc_time(
                hass,
                pressed_more_than_satisfied,
                dt_util.utcnow() + held_more_than)
Example #24
    def simulate_time(self, delta):
        _LOGGER.info(
            '*** simulate time change by %s: %s',
            delta,
            self.start_time + delta)
        self.last_delta = delta
        with mock.patch('homeassistant.helpers.condition.dt_util.utcnow',
                        return_value=self.start_time + delta):
            _LOGGER.info('now=%s', dt_util.utcnow())
            fire_time_changed(self.hass, self.start_time + delta)
            self.hass.block_till_done()
            _LOGGER.info('done with now=%s', dt_util.utcnow())
Example #25
    def test_measure(self):
        """Test the history statistics sensor measure."""
        t0 = dt_util.utcnow() - timedelta(minutes=40)
        t1 = t0 + timedelta(minutes=20)
        t2 = dt_util.utcnow() - timedelta(minutes=10)

        # Start     t0        t1        t2        End
        # |--20min--|--20min--|--10min--|--10min--|
        # |---off---|---on----|---off---|---on----|

        fake_states = {
            'binary_sensor.test_id': [
                ha.State('binary_sensor.test_id', 'on', last_changed=t0),
                ha.State('binary_sensor.test_id', 'off', last_changed=t1),
                ha.State('binary_sensor.test_id', 'on', last_changed=t2),
            ]
        }

        start = Template('{{ as_timestamp(now()) - 3600 }}', self.hass)
        end = Template('{{ now() }}', self.hass)

        sensor1 = HistoryStatsSensor(
            self.hass, 'binary_sensor.test_id', 'on', start, end, None,
            'time', 'Test')

        sensor2 = HistoryStatsSensor(
            self.hass, 'unknown.id', 'on', start, end, None, 'time', 'Test')

        sensor3 = HistoryStatsSensor(
            self.hass, 'binary_sensor.test_id', 'on', start, end, None,
            'count', 'test')

        sensor4 = HistoryStatsSensor(
            self.hass, 'binary_sensor.test_id', 'on', start, end, None,
            'ratio', 'test')

        assert sensor1._type == 'time'
        assert sensor3._type == 'count'
        assert sensor4._type == 'ratio'

        with patch('homeassistant.components.history.'
                   'state_changes_during_period', return_value=fake_states):
            with patch('homeassistant.components.history.get_state',
                       return_value=None):
                sensor1.update()
                sensor2.update()
                sensor3.update()
                sensor4.update()

        assert sensor1.state == 0.5
        assert sensor2.state is None
        assert sensor3.state == 2
        assert sensor4.state == 50
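
A quick back-of-the-envelope check of the expected values above, using plain datetime arithmetic rather than the sensor itself: within the one-hour window the tracked state is 'on' from t0 to t1 (20 minutes) and again from t2 to the end (10 minutes).

from datetime import timedelta

window = timedelta(hours=1)
on_time = timedelta(minutes=20) + timedelta(minutes=10)  # t0->t1 plus t2->end

assert on_time / window == 0.5        # sensor1 ('time'): 0.5 hours
assert 100 * on_time / window == 50   # sensor4 ('ratio'): 50 percent
# sensor3 ('count') is 2 because the state switches to 'on' twice (t0 and t2).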
Example #26
    def test_action_delay(self):
        """Test action delay."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'alias': 'hello',
                'trigger': {
                    'platform': 'event',
                    'event_type': 'test_event',
                },
                'action': [
                    {
                        'service': 'test.automation',
                        'data_template': {
                            'some': '{{ trigger.platform }} - '
                                    '{{ trigger.event.event_type }}'
                        }
                    },
                    {'delay': {'minutes': '10'}},
                    {
                        'service': 'test.automation',
                        'data_template': {
                            'some': '{{ trigger.platform }} - '
                                    '{{ trigger.event.event_type }}'
                        }
                    },
                ]
            }
        })

        time = dt_util.utcnow()

        with patch('homeassistant.components.automation.utcnow',
                   return_value=time):
            self.hass.bus.fire('test_event')
            self.hass.block_till_done()

        assert len(self.calls) == 1
        assert self.calls[0].data['some'] == 'event - test_event'

        future = dt_util.utcnow() + timedelta(minutes=10)
        fire_time_changed(self.hass, future)
        self.hass.block_till_done()

        assert len(self.calls) == 2
        assert self.calls[1].data['some'] == 'event - test_event'

        state = self.hass.states.get('automation.hello')
        assert state is not None
        assert state.attributes.get('last_triggered') == time
        state = self.hass.states.get('group.all_automations')
        assert state is not None
        assert state.attributes.get('entity_id') == ('automation.hello',)
Example #27
async def test_back_to_back_trigger_with_no_disarm_after_trigger(hass):
    """Test no disarm after back to back trigger."""
    assert await async_setup_component(
        hass, alarm_control_panel.DOMAIN,
        {'alarm_control_panel': {
            'platform': 'manual',
            'name': 'test',
            'trigger_time': 5,
            'pending_time': 0,
            'disarm_after_trigger': False
        }})

    entity_id = 'alarm_control_panel.test'

    assert STATE_ALARM_DISARMED == \
        hass.states.get(entity_id).state

    common.async_alarm_arm_away(hass, CODE, entity_id)
    await hass.async_block_till_done()

    assert STATE_ALARM_ARMED_AWAY == \
        hass.states.get(entity_id).state

    common.async_alarm_trigger(hass, entity_id=entity_id)
    await hass.async_block_till_done()

    assert STATE_ALARM_TRIGGERED == \
        hass.states.get(entity_id).state

    future = dt_util.utcnow() + timedelta(seconds=5)
    with patch(('homeassistant.components.manual.alarm_control_panel.'
                'dt_util.utcnow'), return_value=future):
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()

    assert STATE_ALARM_ARMED_AWAY == \
        hass.states.get(entity_id).state

    common.async_alarm_trigger(hass, entity_id=entity_id)
    await hass.async_block_till_done()

    assert STATE_ALARM_TRIGGERED == \
        hass.states.get(entity_id).state

    future = dt_util.utcnow() + timedelta(seconds=5)
    with patch(('homeassistant.components.manual.alarm_control_panel.'
                'dt_util.utcnow'), return_value=future):
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()

    assert STATE_ALARM_ARMED_AWAY == \
        hass.states.get(entity_id).state
Example #28
async def simulate_time(hass, mock_lj, delta):
    """Test to simulate time."""
    _LOGGER.info(
        '*** simulate time change by %s: %s',
        delta,
        mock_lj.start_time + delta)
    mock_lj.last_delta = delta
    with mock.patch('homeassistant.helpers.condition.dt_util.utcnow',
                    return_value=mock_lj.start_time + delta):
        _LOGGER.info('now=%s', dt_util.utcnow())
        async_fire_time_changed(hass, mock_lj.start_time + delta)
        await hass.async_block_till_done()
        _LOGGER.info('done with now=%s', dt_util.utcnow())
Example #29
async def check_has_unique_id(entity, ready_callback, timeout_callback, loop):
    """Wait for entity to have unique_id."""
    start_time = dt_util.utcnow()
    while True:
        waited = int((dt_util.utcnow()-start_time).total_seconds())
        if entity.unique_id:
            ready_callback(waited)
            return
        elif waited >= const.NODE_READY_WAIT_SECS:
            # Wait up to NODE_READY_WAIT_SECS seconds for unique_id to appear.
            timeout_callback(waited)
            return
        await asyncio.sleep(1, loop=loop)
Example #30
    def test_chain(self):
        """Test if filter chaining works."""
        self.init_recorder()
        config = {
            'history': {
            },
            'sensor': {
                'platform': 'filter',
                'name': 'test',
                'entity_id': 'sensor.test_monitored',
                'history_period': '00:05',
                'filters': [{
                    'filter': 'outlier',
                    'window_size': 10,
                    'radius': 4.0
                    }, {
                        'filter': 'lowpass',
                        'time_constant': 10,
                        'precision': 2
                    }, {
                        'filter': 'throttle',
                        'window_size': 1
                    }]
            }
        }
        t_0 = dt_util.utcnow() - timedelta(minutes=1)
        t_1 = dt_util.utcnow() - timedelta(minutes=2)
        t_2 = dt_util.utcnow() - timedelta(minutes=3)

        fake_states = {
            'sensor.test_monitored': [
                ha.State('sensor.test_monitored', 18.0, last_changed=t_0),
                ha.State('sensor.test_monitored', 19.0, last_changed=t_1),
                ha.State('sensor.test_monitored', 18.2, last_changed=t_2),
            ]
        }

        with patch('homeassistant.components.history.'
                   'state_changes_during_period', return_value=fake_states):
            with patch('homeassistant.components.history.'
                       'get_last_state_changes', return_value=fake_states):
                with assert_setup_component(1, 'sensor'):
                    assert setup_component(self.hass, 'sensor', config)

                for value in self.values:
                    self.hass.states.set(
                        config['sensor']['entity_id'], value.state)
                    self.hass.block_till_done()

                state = self.hass.states.get('sensor.test')
                self.assertEqual('17.05', state.state)
Example #31
        def state_message_received(msg):
            """Handle a new received MQTT state message."""
            payload = msg.payload
            # auto-expire enabled?
            expire_after = self._config.get(CONF_EXPIRE_AFTER)

            if expire_after is not None and expire_after > 0:

                # When expire_after is set, and we receive a message, assume device is
                # not expired since it has to be to receive the message
                self._expired = False

                # Reset old trigger
                if self._expiration_trigger:
                    self._expiration_trigger()
                    self._expiration_trigger = None

                # Set new trigger
                expiration_at = dt_util.utcnow() + timedelta(
                    seconds=expire_after)

                self._expiration_trigger = async_track_point_in_utc_time(
                    self.hass, self._value_is_expired, expiration_at)

            value_template = self._config.get(CONF_VALUE_TEMPLATE)
            if value_template is not None:
                payload = value_template.async_render_with_possible_json_value(
                    payload, variables={"entity_id": self.entity_id})
                if not payload.strip():  # No output from template, ignore
                    _LOGGER.debug(
                        "Empty template output for entity: %s with state topic: %s. Payload: '%s', with value template '%s'",
                        self._config[CONF_NAME],
                        self._config[CONF_STATE_TOPIC],
                        msg.payload,
                        value_template,
                    )
                    return

            if payload == self._config[CONF_PAYLOAD_ON]:
                self._state = True
            elif payload == self._config[CONF_PAYLOAD_OFF]:
                self._state = False
            else:  # Payload is not for this entity
                template_info = ""
                if value_template is not None:
                    template_info = f", template output: '{payload}', with value template '{str(value_template)}'"
                _LOGGER.info(
                    "No matching payload found for entity: %s with state topic: %s. Payload: '%s'%s",
                    self._config[CONF_NAME],
                    self._config[CONF_STATE_TOPIC],
                    msg.payload,
                    template_info,
                )
                return

            if self._delay_listener is not None:
                self._delay_listener()
                self._delay_listener = None

            off_delay = self._config.get(CONF_OFF_DELAY)
            if self._state and off_delay is not None:
                self._delay_listener = evt.async_call_later(
                    self.hass, off_delay, off_delay_listener)

            self.async_write_ha_state()
Example #32
            self.clear_position()
            return

        should_update = force_update
        self.duration = duration
        current_position = position_info.get(POSITION_SECONDS)

        # player started reporting position?
        if current_position is not None and self.position is None:
            should_update = True

        # position jumped?
        if current_position is not None and self.position is not None:
            if self.playback_status == SONOS_STATE_PLAYING:
                assert self.position_updated_at is not None
                time_delta = dt_util.utcnow() - self.position_updated_at
                time_diff = time_delta.total_seconds()
            else:
                time_diff = 0

            calculated_position = self.position + time_diff

            if abs(calculated_position - current_position) > 1.5:
                should_update = True

        if current_position is None:
            self.clear_position()
        elif should_update:
            self.position = current_position
            self.position_updated_at = dt_util.utcnow()
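
A small numeric illustration of the jump check above, with made-up values rather than real player data: a reported position that drifts more than 1.5 seconds from the extrapolated one forces an update.

last_reported = 100.0   # seconds, stored at position_updated_at
elapsed = 4.0           # seconds elapsed while playing
reported_now = 130.0    # position the player reports now (e.g. after a seek)

calculated_position = last_reported + elapsed           # 104.0
assert abs(calculated_position - reported_now) > 1.5    # -> should_update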
Example #33
def _remaining_time_to_timestamp(probe: MeaterProbe) -> datetime | None:
    """Convert remaining time to timestamp."""
    if not probe.cook or probe.cook.time_remaining < 0:
        return None
    return dt_util.utcnow() + timedelta(seconds=probe.cook.time_remaining)
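
The same conversion as a tiny self-contained check with plain datetimes; the helper name and values are illustrative, not from the integration: 90 seconds of remaining cook time maps to now + 90 s, while a negative remaining time yields None.

from datetime import datetime, timedelta, timezone


def remaining_to_timestamp(now, seconds_remaining):
    """Return an absolute end time, or None when nothing is cooking."""
    if seconds_remaining is None or seconds_remaining < 0:
        return None
    return now + timedelta(seconds=seconds_remaining)


now = datetime.now(timezone.utc)
assert remaining_to_timestamp(now, 90) == now + timedelta(seconds=90)
assert remaining_to_timestamp(now, -1) is None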
Example #34
async def test_dump_data(hass):
    """Test that we cache data."""
    states = [
        State("input_boolean.b0", "on"),
        State("input_boolean.b1", "on"),
        State("input_boolean.b2", "on"),
        State("input_boolean.b5", "unavailable", {"restored": True}),
    ]

    entity = Entity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b0"
    await entity.async_internal_added_to_hass()

    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b1"
    await entity.async_internal_added_to_hass()

    data = await RestoreStateData.async_get_instance(hass)
    now = dt_util.utcnow()
    data.last_states = {
        "input_boolean.b0":
        StoredState(State("input_boolean.b0", "off"), now),
        "input_boolean.b1":
        StoredState(State("input_boolean.b1", "off"), now),
        "input_boolean.b2":
        StoredState(State("input_boolean.b2", "off"), now),
        "input_boolean.b3":
        StoredState(State("input_boolean.b3", "off"), now),
        "input_boolean.b4":
        StoredState(
            State("input_boolean.b4", "off"),
            datetime(1985, 10, 26, 1, 22, tzinfo=dt_util.UTC),
        ),
        "input_boolean.b5":
        StoredState(State("input_boolean.b5", "off"), now),
    }

    with patch("homeassistant.helpers.restore_state.Store.async_save"
               ) as mock_write_data, patch.object(hass.states,
                                                  "async_all",
                                                  return_value=states):
        await data.async_dump_states()

    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]

    # b0 should not be written, since it didn't extend RestoreEntity
    # b1 should be written, since it is present in the current run
    # b2 should not be written, since it is not registered with the helper
    # b3 should be written, since it is still not expired
    # b4 should not be written, since it is now expired
    # b5 should be written, since current state is restored by entity registry
    assert len(written_states) == 3
    assert written_states[0]["state"]["entity_id"] == "input_boolean.b1"
    assert written_states[0]["state"]["state"] == "on"
    assert written_states[1]["state"]["entity_id"] == "input_boolean.b3"
    assert written_states[1]["state"]["state"] == "off"
    assert written_states[2]["state"]["entity_id"] == "input_boolean.b5"
    assert written_states[2]["state"]["state"] == "off"

    # Test that removed entities are not persisted
    await entity.async_remove()

    with patch("homeassistant.helpers.restore_state.Store.async_save"
               ) as mock_write_data, patch.object(hass.states,
                                                  "async_all",
                                                  return_value=states):
        await data.async_dump_states()

    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]
    assert len(written_states) == 2
    assert written_states[0]["state"]["entity_id"] == "input_boolean.b3"
    assert written_states[0]["state"]["state"] == "off"
    assert written_states[1]["state"]["entity_id"] == "input_boolean.b5"
    assert written_states[1]["state"]["state"] == "off"
Example #35
    async def async_update_status(self):
        """Use the poll session to always get the status of the player."""
        import xmltodict
        response = None

        url = 'Status'
        etag = ''
        if self._status is not None:
            etag = self._status.get('@etag', '')

        if etag != '':
            url = 'Status?etag={}&timeout=120.0'.format(etag)
        url = "http://{}:{}/{}".format(self.host, self.port, url)

        _LOGGER.debug("Calling URL: %s", url)

        try:

            with async_timeout.timeout(125, loop=self._hass.loop):
                response = await self._polling_session.get(
                    url, headers={CONNECTION: KEEP_ALIVE})

            if response.status == 200:
                result = await response.text()
                self._is_online = True
                self._last_status_update = dt_util.utcnow()
                self._status = xmltodict.parse(result)['status'].copy()

                group_name = self._status.get('groupName', None)
                if group_name != self._group_name:
                    _LOGGER.debug("Group name change detected on device: %s",
                                  self.host)
                    self._group_name = group_name
                    # the sleep is needed to make sure that the
                    # device is synced
                    await asyncio.sleep(1, loop=self._hass.loop)
                    await self.async_trigger_sync_on_all()
                elif self.is_grouped:
                    # When the player is grouped we need to fetch the volume
                    # from sync_status, so force an update. This isn't a
                    # foolproof solution; a better approach would be to fetch
                    # sync_status more often while the device is playing,
                    # which would solve a lot of problems. That change will be
                    # made when the communication is moved to a separate
                    # library.
                    await self.force_update_sync_status()

                self.async_schedule_update_ha_state()
            elif response.status == 595:
                _LOGGER.info("Status 595 returned, treating as timeout")
                raise BluesoundPlayer._TimeoutException()
            else:
                _LOGGER.error("Error %s on %s. Trying one more time",
                              response.status, url)

        except (asyncio.TimeoutError, ClientError):
            self._is_online = False
            self._last_status_update = None
            self._status = None
            self.async_schedule_update_ha_state()
            _LOGGER.info("Client connection error, marking %s as offline",
                         self._name)
            raise
Example #36
async def test_hue_events(hass, mock_bridge):
    """Test that hue remotes fire events when pressed."""
    mock_bridge.mock_sensor_responses.append(SENSOR_RESPONSE)

    events = async_capture_events(hass, CONF_HUE_EVENT)

    await setup_bridge(hass, mock_bridge)
    assert len(mock_bridge.mock_requests) == 1
    assert len(hass.states.async_all()) == 7
    assert len(events) == 0

    new_sensor_response = dict(SENSOR_RESPONSE)
    new_sensor_response["7"]["state"] = {
        "buttonevent": 18,
        "lastupdated": "2019-12-28T22:58:03",
    }
    mock_bridge.mock_sensor_responses.append(new_sensor_response)

    # Force updates to run again
    async_fire_time_changed(
        hass,
        dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL)
    await hass.async_block_till_done()

    assert len(mock_bridge.mock_requests) == 2
    assert len(hass.states.async_all()) == 7
    assert len(events) == 1
    assert events[-1].data == {
        "id": "hue_tap",
        "unique_id": "00:00:00:00:00:44:23:08-f2",
        "event": 18,
        "last_updated": "2019-12-28T22:58:03",
    }

    new_sensor_response = dict(new_sensor_response)
    new_sensor_response["8"]["state"] = {
        "buttonevent": 3002,
        "lastupdated": "2019-12-28T22:58:03",
    }
    mock_bridge.mock_sensor_responses.append(new_sensor_response)

    # Force updates to run again
    async_fire_time_changed(
        hass,
        dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL)
    await hass.async_block_till_done()

    assert len(mock_bridge.mock_requests) == 3
    assert len(hass.states.async_all()) == 7
    assert len(events) == 2
    assert events[-1].data == {
        "id": "hue_dimmer_switch_1",
        "unique_id": "00:17:88:01:10:3e:3a:dc-02-fc00",
        "event": 3002,
        "last_updated": "2019-12-28T22:58:03",
    }

    # Fire old event, it should be ignored
    new_sensor_response = dict(new_sensor_response)
    new_sensor_response["8"]["state"] = {
        "buttonevent": 18,
        "lastupdated": "2019-12-28T22:58:02",
    }
    mock_bridge.mock_sensor_responses.append(new_sensor_response)

    # Force updates to run again
    async_fire_time_changed(
        hass,
        dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL)
    await hass.async_block_till_done()

    assert len(mock_bridge.mock_requests) == 4
    assert len(hass.states.async_all()) == 7
    assert len(events) == 2

    # Add a new remote. In discovery the new event is registered **but not fired**
    new_sensor_response = dict(new_sensor_response)
    new_sensor_response["21"] = {
        "state": {
            "rotaryevent": 2,
            "expectedrotation": 208,
            "expectedeventduration": 400,
            "lastupdated": "2020-01-31T15:56:19",
        },
        "swupdate": {
            "state": "noupdates",
            "lastinstall": "2019-11-26T03:35:21"
        },
        "config": {
            "on": True,
            "battery": 100,
            "reachable": True,
            "pending": []
        },
        "name": "Lutron Aurora 1",
        "type": "ZLLRelativeRotary",
        "modelid": "Z3-1BRL",
        "manufacturername": "Lutron",
        "productname": "Lutron Aurora",
        "diversityid": "2c3a75ff-55c4-4e4d-8c44-82d330b8eb9b",
        "swversion": "3.4",
        "uniqueid": "ff:ff:00:0f:e7:fd:bc:b7-01-fc00-0014",
        "capabilities": {
            "certified":
            True,
            "primary":
            True,
            "inputs": [{
                "repeatintervals": [400],
                "events": [
                    {
                        "rotaryevent": 1,
                        "eventtype": "start"
                    },
                    {
                        "rotaryevent": 2,
                        "eventtype": "repeat"
                    },
                ],
            }],
        },
    }
    mock_bridge.mock_sensor_responses.append(new_sensor_response)

    # Force updates to run again
    async_fire_time_changed(
        hass,
        dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL)
    await hass.async_block_till_done()

    assert len(mock_bridge.mock_requests) == 5
    assert len(hass.states.async_all()) == 8
    assert len(events) == 2

    # A new press fires the event
    new_sensor_response["21"]["state"]["lastupdated"] = "2020-01-31T15:57:19"
    mock_bridge.mock_sensor_responses.append(new_sensor_response)

    # Force updates to run again
    async_fire_time_changed(
        hass,
        dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL)
    await hass.async_block_till_done()

    assert len(mock_bridge.mock_requests) == 6
    assert len(hass.states.async_all()) == 8
    assert len(events) == 3
    assert events[-1].data == {
        "id": "lutron_aurora_1",
        "unique_id": "ff:ff:00:0f:e7:fd:bc:b7-01-fc00-0014",
        "event": 2,
        "last_updated": "2020-01-31T15:57:19",
    }
Example #37
    async def async_update(self, **kwargs):
        """Update the sensor."""
        departure_time = utcnow() + timedelta(
            minutes=self.config_entry.options.get("offset", 0)
        )

        departure_time_tz_berlin = departure_time.astimezone(BERLIN_TIME_ZONE)

        payload = {
            "station": self.config_entry.data[CONF_STATION],
            "time": {
                "date": departure_time_tz_berlin.strftime("%d.%m.%Y"),
                "time": departure_time_tz_berlin.strftime("%H:%M"),
            },
            "maxList": MAX_LIST,
            "maxTimeOffset": MAX_TIME_OFFSET,
            "useRealtime": self.config_entry.options.get("realtime", False),
        }

        if "filter" in self.config_entry.options:
            payload.update({"filter": self.config_entry.options["filter"]})

        try:
            data = await self.gti.departureList(payload)
        except InvalidAuth as error:
            if self._last_error != InvalidAuth:
                _LOGGER.error("Authentication failed: %r", error)
                self._last_error = InvalidAuth
            self._available = False
            return
        except ClientConnectorError as error:
            if self._last_error != ClientConnectorError:
                _LOGGER.warning("Network unavailable: %r", error)
                self._last_error = ClientConnectorError
            self._available = False
            return
        except Exception as error:  # pylint: disable=broad-except
            if self._last_error != error:
                _LOGGER.error("Error occurred while fetching data: %r", error)
                self._last_error = error
            self._available = False
            return

        if not (data["returnCode"] == "OK" and data.get("departures")):
            self._available = False
            return

        if self._last_error == ClientConnectorError:
            _LOGGER.debug("Network available again")

        self._last_error = None

        departure = data["departures"][0]
        line = departure["line"]
        delay = departure.get("delay", 0)
        self._available = True
        self._state = (
            departure_time
            + timedelta(minutes=departure["timeOffset"])
            + timedelta(seconds=delay)
        ).isoformat()

        self.attr.update(
            {
                ATTR_LINE: line["name"],
                ATTR_ORIGIN: line["origin"],
                ATTR_DIRECTION: line["direction"],
                ATTR_TYPE: line["type"]["shortInfo"],
                ATTR_ID: line["id"],
                ATTR_DELAY: delay,
            }
        )

        departures = []
        for departure in data["departures"]:
            line = departure["line"]
            delay = departure.get("delay", 0)
            departures.append(
                {
                    ATTR_DEPARTURE: departure_time
                    + timedelta(minutes=departure["timeOffset"])
                    + timedelta(seconds=delay),
                    ATTR_LINE: line["name"],
                    ATTR_ORIGIN: line["origin"],
                    ATTR_DIRECTION: line["direction"],
                    ATTR_TYPE: line["type"]["shortInfo"],
                    ATTR_ID: line["id"],
                    ATTR_DELAY: delay,
                }
            )
        self.attr[ATTR_NEXT] = departures
Example #38
    async def async_update_status(self):
        """Use the poll session to always get the status of the player."""
        response = None

        url = "Status"
        etag = ""
        if self._status is not None:
            etag = self._status.get("@etag", "")

        if etag != "":
            url = f"Status?etag={etag}&timeout=120.0"
        url = f"http://{self.host}:{self.port}/{url}"

        _LOGGER.debug("Calling URL: %s", url)

        try:

            with async_timeout.timeout(125):
                response = await self._polling_session.get(
                    url, headers={CONNECTION: KEEP_ALIVE})

            if response.status == HTTP_OK:
                result = await response.text()
                self._is_online = True
                self._last_status_update = dt_util.utcnow()
                self._status = xmltodict.parse(result)["status"].copy()

                group_name = self._status.get("groupName")
                if group_name != self._group_name:
                    _LOGGER.debug("Group name change detected on device: %s",
                                  self.host)
                    self._group_name = group_name

                    # rebuild ordered list of entity_ids that are in the group, master is first
                    self._group_list = self.rebuild_bluesound_group()

                    # the sleep is needed to make sure that the
                    # device is synced
                    await asyncio.sleep(1)
                    await self.async_trigger_sync_on_all()
                elif self.is_grouped:
                    # When the player is grouped we need to fetch the volume
                    # from sync_status, so force an update. This isn't a
                    # foolproof solution; a better one would be to fetch
                    # sync_status more often while the device is playing,
                    # which would solve a lot of problems. That change will
                    # be made when the communication is moved to a separate
                    # library.
                    await self.force_update_sync_status()

                self.async_write_ha_state()
            elif response.status == 595:
                _LOGGER.info("Status 595 returned, treating as timeout")
                raise BluesoundPlayer._TimeoutException()
            else:
                _LOGGER.error("Error %s on %s. Trying one more time",
                              response.status, url)

        except (asyncio.TimeoutError, ClientError):
            self._is_online = False
            self._last_status_update = None
            self._status = None
            self.async_write_ha_state()
            _LOGGER.info("Client connection error, marking %s as offline",
                         self._name)
            raise
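The long poll above reuses the player's ETag so the next request blocks until the status changes. A hedged standalone sketch of just that request, assuming an existing aiohttp session; the host/port parameters and the 120 s/125 s timeouts mirror the snippet.

import aiohttp
import async_timeout


async def poll_status(session: aiohttp.ClientSession,
                      host: str, port: int, etag: str = "") -> str:
    """Long-poll the player's Status endpoint, reusing the last ETag."""
    url = f"http://{host}:{port}/Status"
    if etag:
        # Ask the player to hold the request open until the status changes,
        # for at most 120 seconds.
        url = f"{url}?etag={etag}&timeout=120.0"
    # The client-side timeout is slightly longer than the server-side one.
    async with async_timeout.timeout(125):
        response = await session.get(url, headers={"Connection": "keep-alive"})
    return await response.text()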
Example #39
0
    def async_update(self):
        """Get the time and updates the states."""
        from astral import Astral

        today = dt_util.as_local(dt_util.utcnow()).date()
        self._state = Astral().moon_phase(today)
Example #40
0
async def test_setup(hass):
    """Test the general setup of the integration."""
    # Set up some mock feed entries for this test.
    mock_entry_1 = _generate_mock_feed_entry(
        "1234",
        "Title 1",
        15.5,
        (38.0, -3.0),
        locality="Locality 1",
        attribution="Attribution 1",
        time=datetime.datetime(2018, 9, 22, 8, 0,
                               tzinfo=datetime.timezone.utc),
        magnitude=5.7,
        mmi=5,
        depth=10.5,
        quality="best",
    )
    mock_entry_2 = _generate_mock_feed_entry("2345",
                                             "Title 2",
                                             20.5, (38.1, -3.1),
                                             magnitude=4.6)
    mock_entry_3 = _generate_mock_feed_entry("3456",
                                             "Title 3",
                                             25.5, (38.2, -3.2),
                                             locality="Locality 3")
    mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5,
                                             (38.3, -3.3))

    # Patching 'utcnow' to gain more control over the timed update.
    utcnow = dt_util.utcnow()
    with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
            "aio_geojson_client.feed.GeoJsonFeed.update",
            new_callable=CoroutineMock) as mock_feed_update:
        mock_feed_update.return_value = "OK", [
            mock_entry_1, mock_entry_2, mock_entry_3
        ]
        assert await async_setup_component(hass, geonetnz_quakes.DOMAIN,
                                           CONFIG)
        # Artificially trigger update and collect events.
        hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
        await hass.async_block_till_done()

        all_states = hass.states.async_all()
        # 3 geolocation and 1 sensor entities
        assert len(all_states) == 4

        state = hass.states.get("geo_location.title_1")
        assert state is not None
        assert state.name == "Title 1"
        assert state.attributes == {
            ATTR_EXTERNAL_ID: "1234",
            ATTR_LATITUDE: 38.0,
            ATTR_LONGITUDE: -3.0,
            ATTR_FRIENDLY_NAME: "Title 1",
            ATTR_LOCALITY: "Locality 1",
            ATTR_ATTRIBUTION: "Attribution 1",
            ATTR_TIME: datetime.datetime(
                2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc
            ),
            ATTR_MAGNITUDE: 5.7,
            ATTR_DEPTH: 10.5,
            ATTR_MMI: 5,
            ATTR_QUALITY: "best",
            ATTR_UNIT_OF_MEASUREMENT: "km",
            ATTR_SOURCE: "geonetnz_quakes",
            ATTR_ICON: "mdi:pulse",
        }
        assert float(state.state) == 15.5

        state = hass.states.get("geo_location.title_2")
        assert state is not None
        assert state.name == "Title 2"
        assert state.attributes == {
            ATTR_EXTERNAL_ID: "2345",
            ATTR_LATITUDE: 38.1,
            ATTR_LONGITUDE: -3.1,
            ATTR_FRIENDLY_NAME: "Title 2",
            ATTR_MAGNITUDE: 4.6,
            ATTR_UNIT_OF_MEASUREMENT: "km",
            ATTR_SOURCE: "geonetnz_quakes",
            ATTR_ICON: "mdi:pulse",
        }
        assert float(state.state) == 20.5

        state = hass.states.get("geo_location.title_3")
        assert state is not None
        assert state.name == "Title 3"
        assert state.attributes == {
            ATTR_EXTERNAL_ID: "3456",
            ATTR_LATITUDE: 38.2,
            ATTR_LONGITUDE: -3.2,
            ATTR_FRIENDLY_NAME: "Title 3",
            ATTR_LOCALITY: "Locality 3",
            ATTR_UNIT_OF_MEASUREMENT: "km",
            ATTR_SOURCE: "geonetnz_quakes",
            ATTR_ICON: "mdi:pulse",
        }
        assert float(state.state) == 25.5

        # Simulate an update - two existing, one new entry, one outdated entry
        mock_feed_update.return_value = "OK", [
            mock_entry_1, mock_entry_4, mock_entry_3
        ]
        async_fire_time_changed(hass, utcnow + DEFAULT_SCAN_INTERVAL)
        await hass.async_block_till_done()

        all_states = hass.states.async_all()
        assert len(all_states) == 4

        # Simulate an update - empty data, but successful update,
        # so no changes to entities.
        mock_feed_update.return_value = "OK_NO_DATA", None
        async_fire_time_changed(hass, utcnow + 2 * DEFAULT_SCAN_INTERVAL)
        await hass.async_block_till_done()

        all_states = hass.states.async_all()
        assert len(all_states) == 4

        # Simulate an update - error response, which removes all entities
        mock_feed_update.return_value = "ERROR", None
        async_fire_time_changed(hass, utcnow + 3 * DEFAULT_SCAN_INTERVAL)
        await hass.async_block_till_done()

        all_states = hass.states.async_all()
        assert len(all_states) == 1
Example #41
0
async def test_setup(hass):
    """Test the general setup of the integration."""
    # Set up some mock feed entries for this test.
    mock_entry_1 = _generate_mock_feed_entry(
        "1234",
        "Title 1",
        15.5,
        (38.0, -3.0),
        attribution="Attribution 1",
    )
    mock_entry_2 = _generate_mock_feed_entry(
        "2345",
        "Title 2",
        20.5,
        (38.1, -3.1),
    )
    mock_entry_3 = _generate_mock_feed_entry(
        "3456",
        "Title 3",
        25.5,
        (38.2, -3.2),
    )
    mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5,
                                             (38.3, -3.3))

    # Patching 'utcnow' to gain more control over the timed update.
    utcnow = dt_util.utcnow()
    with freeze_time(utcnow), patch(
            "aio_georss_client.feed.GeoRssFeed.update") as mock_feed_update:
        mock_feed_update.return_value = "OK", [
            mock_entry_1, mock_entry_2, mock_entry_3
        ]
        assert await async_setup_component(hass, gdacs.DOMAIN, CONFIG)
        # Artificially trigger update and collect events.
        hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
        await hass.async_block_till_done()

        # 3 geolocation and 1 sensor entities
        assert (len(hass.states.async_entity_ids("geo_location")) +
                len(hass.states.async_entity_ids("sensor")) == 4)

        state = hass.states.get("sensor.gdacs_32_87336_117_22743")
        assert state is not None
        assert int(state.state) == 3
        assert state.name == "GDACS (32.87336, -117.22743)"
        attributes = state.attributes
        assert attributes[ATTR_STATUS] == "OK"
        assert attributes[ATTR_CREATED] == 3
        assert attributes[ATTR_LAST_UPDATE].tzinfo == dt_util.UTC
        assert attributes[ATTR_LAST_UPDATE_SUCCESSFUL].tzinfo == dt_util.UTC
        assert attributes[ATTR_LAST_UPDATE] == attributes[
            ATTR_LAST_UPDATE_SUCCESSFUL]
        assert attributes[ATTR_UNIT_OF_MEASUREMENT] == "alerts"
        assert attributes[ATTR_ICON] == "mdi:alert"

        # Simulate an update - two existing, one new entry, one outdated entry
        mock_feed_update.return_value = "OK", [
            mock_entry_1, mock_entry_4, mock_entry_3
        ]
        async_fire_time_changed(hass, utcnow + DEFAULT_SCAN_INTERVAL)
        await hass.async_block_till_done()

        assert (len(hass.states.async_entity_ids("geo_location")) +
                len(hass.states.async_entity_ids("sensor")) == 4)

        state = hass.states.get("sensor.gdacs_32_87336_117_22743")
        attributes = state.attributes
        assert attributes[ATTR_CREATED] == 1
        assert attributes[ATTR_UPDATED] == 2
        assert attributes[ATTR_REMOVED] == 1

        # Simulate an update - empty data, but successful update,
        # so no changes to entities.
        mock_feed_update.return_value = "OK_NO_DATA", None
        async_fire_time_changed(hass, utcnow + 2 * DEFAULT_SCAN_INTERVAL)
        await hass.async_block_till_done()

        assert (len(hass.states.async_entity_ids("geo_location")) +
                len(hass.states.async_entity_ids("sensor")) == 4)

        # Simulate an update - error response, which removes all entities
        mock_feed_update.return_value = "ERROR", None
        async_fire_time_changed(hass, utcnow + 3 * DEFAULT_SCAN_INTERVAL)
        await hass.async_block_till_done()

        assert (len(hass.states.async_entity_ids("geo_location")) +
                len(hass.states.async_entity_ids("sensor")) == 1)

        state = hass.states.get("sensor.gdacs_32_87336_117_22743")
        attributes = state.attributes
        assert attributes[ATTR_REMOVED] == 3
Example #42
0
    def __enter__(self) -> List[State]:
        """Record time from which to track changes."""
        self.now = dt_util.utcnow()
        return self.states
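A hedged sketch of how such a context manager might be completed: __exit__ collects every state updated after the timestamp recorded in __enter__. The class name and the hass/states attributes are assumptions for illustration.

from typing import List

import homeassistant.util.dt as dt_util
from homeassistant.core import HomeAssistant, State


class RecordStateChanges:
    """Collect states that changed while the context was active."""

    def __init__(self, hass: HomeAssistant) -> None:
        self.hass = hass
        self.states: List[State] = []
        self.now = None

    def __enter__(self) -> List[State]:
        """Record time from which to track changes."""
        self.now = dt_util.utcnow()
        return self.states

    def __exit__(self, exc_type, exc_value, traceback) -> None:
        """Keep only states updated after the recorded time."""
        self.states.extend(
            state for state in self.hass.states.all()
            if state.last_updated >= self.now
        )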
Example #43
0
    async def _async_learn_rf_command(self, command):
        """Learn a radiofrequency command."""
        device = self._device

        try:
            await device.async_request(device.api.sweep_frequency)

        except (BroadlinkException, OSError) as err:
            _LOGGER.debug("Failed to sweep frequency: %s", err)
            raise

        persistent_notification.async_create(
            self.hass,
            f"Press and hold the '{command}' button.",
            title="Sweep frequency",
            notification_id="sweep_frequency",
        )

        try:
            start_time = dt.utcnow()
            while (dt.utcnow() - start_time) < LEARNING_TIMEOUT:
                await asyncio.sleep(1)
                found = await device.async_request(device.api.check_frequency)
                if found:
                    break
            else:
                await device.async_request(device.api.cancel_sweep_frequency)
                raise TimeoutError(
                    "No radiofrequency found within "
                    f"{LEARNING_TIMEOUT.total_seconds()} seconds")

        finally:
            persistent_notification.async_dismiss(
                self.hass, notification_id="sweep_frequency")

        await asyncio.sleep(1)

        try:
            await device.async_request(device.api.find_rf_packet)

        except (BroadlinkException, OSError) as err:
            _LOGGER.debug("Failed to enter learning mode: %s", err)
            raise

        persistent_notification.async_create(
            self.hass,
            f"Press the '{command}' button again.",
            title="Learn command",
            notification_id="learn_command",
        )

        try:
            start_time = dt.utcnow()
            while (dt.utcnow() - start_time) < LEARNING_TIMEOUT:
                await asyncio.sleep(1)
                try:
                    code = await device.async_request(device.api.check_data)
                except (ReadError, StorageError):
                    continue
                return b64encode(code).decode("utf8")

            raise TimeoutError("No radiofrequency code received within "
                               f"{LEARNING_TIMEOUT.total_seconds()} seconds")

        finally:
            persistent_notification.async_dismiss(
                self.hass, notification_id="learn_command")
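Both learning stages above share the same poll-until-deadline loop built on dt.utcnow(). A minimal, generalized sketch of that loop; check() and the 30-second LEARNING_TIMEOUT are placeholders, not Broadlink API calls.

import asyncio
from datetime import timedelta

from homeassistant.util import dt

LEARNING_TIMEOUT = timedelta(seconds=30)


async def wait_for(check) -> bool:
    """Poll an async predicate once per second until it succeeds or times out."""
    start_time = dt.utcnow()
    while (dt.utcnow() - start_time) < LEARNING_TIMEOUT:
        await asyncio.sleep(1)
        if await check():
            return True
    raise TimeoutError(
        "Condition not met within "
        f"{LEARNING_TIMEOUT.total_seconds()} seconds")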
Example #44
0
def record_states(hass):
    """Record some test states.

    We inject a bunch of state updates for temperature sensors.
    """
    mp = "media_player.test"
    sns1 = "sensor.test1"
    sns2 = "sensor.test2"
    sns3 = "sensor.test3"
    sns4 = "sensor.test4"
    sns1_attr = {
        "device_class": "temperature",
        "state_class": "measurement",
        "unit_of_measurement": TEMP_CELSIUS,
    }
    sns2_attr = {
        "device_class": "humidity",
        "state_class": "measurement",
        "unit_of_measurement": "%",
    }
    sns3_attr = {"device_class": "temperature"}
    sns4_attr = {}

    def set_state(entity_id, state, **kwargs):
        """Set the state."""
        hass.states.set(entity_id, state, **kwargs)
        wait_recording_done(hass)
        return hass.states.get(entity_id)

    zero = dt_util.utcnow()
    one = zero + timedelta(seconds=1 * 5)
    two = one + timedelta(seconds=15 * 5)
    three = two + timedelta(seconds=30 * 5)
    four = three + timedelta(seconds=15 * 5)

    states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []}
    with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=one):
        states[mp].append(
            set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
        )
        states[mp].append(
            set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
        )
        states[sns1].append(set_state(sns1, "10", attributes=sns1_attr))
        states[sns2].append(set_state(sns2, "10", attributes=sns2_attr))
        states[sns3].append(set_state(sns3, "10", attributes=sns3_attr))
        states[sns4].append(set_state(sns4, "10", attributes=sns4_attr))

    with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=two):
        states[sns1].append(set_state(sns1, "15", attributes=sns1_attr))
        states[sns2].append(set_state(sns2, "15", attributes=sns2_attr))
        states[sns3].append(set_state(sns3, "15", attributes=sns3_attr))
        states[sns4].append(set_state(sns4, "15", attributes=sns4_attr))

    with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=three):
        states[sns1].append(set_state(sns1, "20", attributes=sns1_attr))
        states[sns2].append(set_state(sns2, "20", attributes=sns2_attr))
        states[sns3].append(set_state(sns3, "20", attributes=sns3_attr))
        states[sns4].append(set_state(sns4, "20", attributes=sns4_attr))

    return zero, four, states
Example #45
0
    def purge_ticker(event):
        """Rerun purge every second day."""
        self._purge_old_data()
        track_point_in_utc_time(self.hass, purge_ticker,
                                dt_util.utcnow() + timedelta(days=2))
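A hedged sketch of how a self-rescheduling callback like purge_ticker is typically kicked off; the helper name is illustrative, and the two-day interval mirrors the snippet.

from datetime import timedelta

import homeassistant.util.dt as dt_util
from homeassistant.helpers.event import track_point_in_utc_time


def schedule_first_purge(hass, purge_ticker):
    """Schedule the first purge two days from now; it then reschedules itself."""
    track_point_in_utc_time(hass, purge_ticker,
                            dt_util.utcnow() + timedelta(days=2))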
Example #46
0
    def _close_run(self):
        """Save end time for current run."""
        self._run.end = dt_util.utcnow()
        self._commit(self._run)
        self._run = None
Example #47
0
async def base_test(
    hass,
    config_device,
    device_name,
    entity_domain,
    array_name_discovery,
    array_name_old_config,
    register_words,
    expected,
    method_discovery=False,
    check_config_only=False,
    config_modbus=None,
    scan_interval=None,
):
    """Run test on device for given config."""

    if config_modbus is None:
        config_modbus = {
            DOMAIN: {
                CONF_NAME: DEFAULT_HUB,
                CONF_TYPE: "tcp",
                CONF_HOST: "modbusTest",
                CONF_PORT: 5001,
            },
        }

    mock_sync = mock.MagicMock()
    with mock.patch(
            "homeassistant.components.modbus.modbus.ModbusTcpClient",
            return_value=mock_sync), mock.patch(
                "homeassistant.components.modbus.modbus.ModbusSerialClient",
                return_value=mock_sync,
            ), mock.patch(
                "homeassistant.components.modbus.modbus.ModbusUdpClient",
                return_value=mock_sync):

        # Setup inputs for the sensor
        read_result = ReadResult(register_words)
        mock_sync.read_coils.return_value = read_result
        mock_sync.read_discrete_inputs.return_value = read_result
        mock_sync.read_input_registers.return_value = read_result
        mock_sync.read_holding_registers.return_value = read_result

        # mock timer and add old/new config
        now = dt_util.utcnow()
        with mock.patch("homeassistant.helpers.event.dt_util.utcnow",
                        return_value=now):
            if method_discovery and config_device is not None:
                # setup modbus which in turn does setup for the devices
                config_modbus[DOMAIN].update(
                    {array_name_discovery: [{
                        **config_device
                    }]})
                config_device = None
            assert await async_setup_component(hass, DOMAIN, config_modbus)
            await hass.async_block_till_done()

            # setup platform old style
            if config_device is not None:
                config_device = {
                    entity_domain: {
                        CONF_PLATFORM: DOMAIN,
                        array_name_old_config: [{
                            **config_device,
                        }],
                    }
                }
                if scan_interval is not None:
                    config_device[entity_domain][
                        CONF_SCAN_INTERVAL] = scan_interval
                assert await async_setup_component(hass, entity_domain,
                                                   config_device)
                await hass.async_block_till_done()

        assert DOMAIN in hass.data
        if config_device is not None:
            entity_id = f"{entity_domain}.{device_name}"
            device = hass.states.get(entity_id)
            if device is None:
                pytest.fail("CONFIG failed, see output")
        if check_config_only:
            return

        # Trigger update call with time_changed event
        now = now + timedelta(seconds=scan_interval + 60)
        with mock.patch("homeassistant.helpers.event.dt_util.utcnow",
                        return_value=now):
            async_fire_time_changed(hass, now)
            await hass.async_block_till_done()

        # Check state
        entity_id = f"{entity_domain}.{device_name}"
        return hass.states.get(entity_id).state
Example #48
0
    def async_set_context(self, context: Context) -> None:
        """Set the context the entity currently operates under."""
        self._context = context
        self._context_set = dt_util.utcnow()
Example #49
0
    def test_power_off_in_progress(self):
        """Test for power_off_in_progress."""
        self.assertFalse(self.device._power_off_in_progress())
        self.device._end_of_power_off = dt_util.utcnow() + timedelta(
            seconds=15)
        self.assertTrue(self.device._power_off_in_progress())
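A hedged sketch of the _power_off_in_progress helper the test above exercises; the class itself is illustrative, only the attribute names follow the test.

import homeassistant.util.dt as dt_util


class Device:
    """Illustrative device tracking a pending power-off deadline."""

    def __init__(self) -> None:
        self._end_of_power_off = None

    def _power_off_in_progress(self) -> bool:
        """Return True while the power-off deadline lies in the future."""
        return (self._end_of_power_off is not None
                and self._end_of_power_off > dt_util.utcnow())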
Example #50
0
    def time_delta(self):
        """Return the time delta."""
        dt0 = self._start_time
        dt1 = dt_util.utcnow()
        return dt1 - dt0
Example #51
0
def test_external_statistics(hass_recorder, caplog):
    """Test inserting external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=2)

    external_statistics1 = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2,
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(
        hass, external_metadata, (external_statistics1, external_statistics2)
    )
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "test:total_energy_import",
            "name": "Total imported energy",
            "source": "test",
            "unit_of_measurement": "kWh",
        }
    ]
    metadata = get_metadata(hass, statistic_ids=("test:total_energy_import",))
    assert metadata == {
        "test:total_energy_import": (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": "test",
                "statistic_id": "test:total_energy_import",
                "unit_of_measurement": "kWh",
            },
        )
    }
    last_stats = get_last_statistics(hass, 1, "test:total_energy_import", True)
    assert last_stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 5,
        "sum": 6,
    }
    async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(5.0),
                "sum": approx(6.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics again, now with min/max/mean
    external_statistics = {
        "start": period1,
        "max": 1,
        "mean": 2,
        "min": 3,
        "last_reset": None,
        "state": 4,
        "sum": 5,
    }
    async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
Example #52
0
def _elapsed_time_to_timestamp(probe: MeaterProbe) -> datetime | None:
    """Convert elapsed time to timestamp."""
    if not probe.cook:
        return None
    return dt_util.utcnow() - timedelta(seconds=probe.cook.time_elapsed)
Example #53
0
    def _async_write_ha_state(self) -> None:
        """Write the state to the state machine."""
        if self.registry_entry and self.registry_entry.disabled_by:
            if not self._disabled_reported:
                self._disabled_reported = True
                assert self.platform is not None
                _LOGGER.warning(
                    "Entity %s is incorrectly being triggered for updates while it is disabled. This is a bug in the %s integration",
                    self.entity_id,
                    self.platform.platform_name,
                )
            return

        start = timer()

        attr = self.capability_attributes
        attr = dict(attr) if attr else {}

        if not self.available:
            state = STATE_UNAVAILABLE
        else:
            sstate = self.state
            state = STATE_UNKNOWN if sstate is None else str(sstate)
            attr.update(self.state_attributes or {})
            attr.update(self.device_state_attributes or {})

        unit_of_measurement = self.unit_of_measurement
        if unit_of_measurement is not None:
            attr[ATTR_UNIT_OF_MEASUREMENT] = unit_of_measurement

        entry = self.registry_entry
        # pylint: disable=consider-using-ternary
        name = (entry and entry.name) or self.name
        if name is not None:
            attr[ATTR_FRIENDLY_NAME] = name

        icon = (entry and entry.icon) or self.icon
        if icon is not None:
            attr[ATTR_ICON] = icon

        entity_picture = self.entity_picture
        if entity_picture is not None:
            attr[ATTR_ENTITY_PICTURE] = entity_picture

        assumed_state = self.assumed_state
        if assumed_state:
            attr[ATTR_ASSUMED_STATE] = assumed_state

        supported_features = self.supported_features
        if supported_features is not None:
            attr[ATTR_SUPPORTED_FEATURES] = supported_features

        device_class = self.device_class
        if device_class is not None:
            attr[ATTR_DEVICE_CLASS] = str(device_class)

        end = timer()

        if end - start > 0.4 and not self._slow_reported:
            self._slow_reported = True
            extra = ""
            if "custom_components" in type(self).__module__:
                extra = "Please report it to the custom component author."
            else:
                extra = (
                    "Please create a bug report at "
                    "https://github.com/home-assistant/home-assistant/issues?q=is%3Aopen+is%3Aissue"
                )
                if self.platform:
                    extra += (
                        f"+label%3A%22integration%3A+{self.platform.platform_name}%22"
                    )

            _LOGGER.warning(
                "Updating state for %s (%s) took %.3f seconds. %s",
                self.entity_id,
                type(self),
                end - start,
                extra,
            )

        # Overwrite properties that have been set in the config file.
        assert self.hass is not None
        if DATA_CUSTOMIZE in self.hass.data:
            attr.update(self.hass.data[DATA_CUSTOMIZE].get(self.entity_id))

        # Convert temperature if we detect one
        try:
            unit_of_measure = attr.get(ATTR_UNIT_OF_MEASUREMENT)
            units = self.hass.config.units
            if (unit_of_measure in (TEMP_CELSIUS, TEMP_FAHRENHEIT)
                    and unit_of_measure != units.temperature_unit):
                prec = len(state) - state.index(".") - 1 if "." in state else 0
                temp = units.temperature(float(state), unit_of_measure)
                state = str(round(temp) if prec == 0 else round(temp, prec))
                attr[ATTR_UNIT_OF_MEASUREMENT] = units.temperature_unit
        except ValueError:
            # Could not convert state to float
            pass

        if (self._context_set is not None
                and dt_util.utcnow() - self._context_set >
                self.context_recent_time):
            self._context = None
            self._context_set = None

        self.hass.states.async_set(self.entity_id, state, attr,
                                   self.force_update, self._context)
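The temperature conversion near the end of the method preserves the number of decimals present in the original state string. A hedged sketch of just that step, where `units` stands in for hass.config.units:

def convert_temperature_state(state: str, unit_of_measure: str, units) -> str:
    """Convert a temperature state string, keeping its decimal precision."""
    # Count decimals in the incoming state, e.g. "21.5" -> 1, "21" -> 0.
    prec = len(state) - state.index(".") - 1 if "." in state else 0
    temp = units.temperature(float(state), unit_of_measure)
    return str(round(temp) if prec == 0 else round(temp, prec))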
Example #54
0
def test_external_statistics_errors(hass_recorder, caplog):
    """Test validation of external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)

    _external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }

    _external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    # Attempt to insert statistics for an entity
    external_metadata = {
        **_external_metadata,
        "statistic_id": "sensor.total_energy_import",
    }
    external_statistics = {**_external_statistics}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass, statistic_ids=("sensor.total_energy_import",)) == {}

    # Attempt to insert statistics for the wrong domain
    external_metadata = {**_external_metadata, "source": "other"}
    external_statistics = {**_external_statistics}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass, statistic_ids=("test:total_energy_import",)) == {}

    # Attempt to insert statistics for a naive starting time
    external_metadata = {**_external_metadata}
    external_statistics = {
        **_external_statistics,
        "start": period1.replace(tzinfo=None),
    }
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass, statistic_ids=("test:total_energy_import",)) == {}

    # Attempt to insert statistics for an invalid starting time
    external_metadata = {**_external_metadata}
    external_statistics = {**_external_statistics, "start": period1.replace(minute=1)}
    with pytest.raises(HomeAssistantError):
        async_add_external_statistics(hass, external_metadata, (external_statistics,))
    wait_recording_done(hass)
    assert statistics_during_period(hass, zero, period="hour") == {}
    assert list_statistic_ids(hass) == []
    assert get_metadata(hass, statistic_ids=("test:total_energy_import",)) == {}
Example #55
0
    def is_on(self):
        """Return true if movement has happened within the rearm time."""
        return self._state and (self.invalidate_after is None
                                or self.invalidate_after > dt_util.utcnow())
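A hedged sketch of the rearm pattern that pairs with is_on above: a trigger turns the sensor on and pushes the deadline forward by an off delay. The off_delay attribute and trigger() method are assumptions for illustration, not taken from the snippet.

from datetime import timedelta

import homeassistant.util.dt as dt_util


class MotionBinarySensor:
    """Illustrative motion sensor with a rearm window."""

    def __init__(self, off_delay: timedelta | None = None) -> None:
        self._state = False
        self._off_delay = off_delay
        self.invalidate_after = None

    def trigger(self) -> None:
        """Mark movement and extend the deadline by the off delay."""
        self._state = True
        if self._off_delay is not None:
            self.invalidate_after = dt_util.utcnow() + self._off_delay

    @property
    def is_on(self) -> bool:
        """Return true if movement has happened within the rearm time."""
        return self._state and (self.invalidate_after is None
                                or self.invalidate_after > dt_util.utcnow())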
Example #56
0
def test_monthly_statistics(hass_recorder, caplog, timezone):
    """Test inserting external statistics."""
    dt_util.set_default_time_zone(dt_util.get_time_zone(timezone))

    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))

    external_statistics = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 2,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 3,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 4,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 5,
        },
    )
    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(hass, external_metadata, external_statistics)
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="month")
    sep_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    sep_end = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    oct_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    oct_end = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00"))
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": sep_start.isoformat(),
                "end": sep_end.isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": oct_start.isoformat(),
                "end": oct_end.isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(3.0),
                "sum": approx(5.0),
            },
        ]
    }

    dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))
Example #57
0
def test_delete_duplicates_non_identical(caplog, tmpdir):
    """Test removal of duplicated statistics."""
    test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"

    module = "tests.components.recorder.models_schema_23"
    importlib.import_module(module)
    old_models = sys.modules[module]

    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))

    external_energy_statistics_1 = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 2,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 3,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 4,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 5,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 6,
        },
    )
    external_energy_metadata_1 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_1",
        "unit_of_measurement": "kWh",
    }
    external_energy_statistics_2 = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 20,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 30,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 40,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 50,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 50,
        },
    )
    external_energy_metadata_2 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_2",
        "unit_of_measurement": "kWh",
    }

    # Create some duplicated statistics with schema version 23
    with patch.object(recorder, "models", old_models), patch.object(
        recorder.migration, "SCHEMA_VERSION", old_models.SCHEMA_VERSION
    ), patch(
        "homeassistant.components.recorder.create_engine", new=_create_engine_test
    ):
        hass = get_test_home_assistant()
        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
        wait_recording_done(hass)
        wait_recording_done(hass)

        with session_scope(hass=hass) as session:
            session.add(
                recorder.models.StatisticsMeta.from_meta(external_energy_metadata_1)
            )
            session.add(
                recorder.models.StatisticsMeta.from_meta(external_energy_metadata_2)
            )
        with session_scope(hass=hass) as session:
            for stat in external_energy_statistics_1:
                session.add(recorder.models.Statistics.from_stats(1, stat))
            for stat in external_energy_statistics_2:
                session.add(recorder.models.Statistics.from_stats(2, stat))

        hass.stop()

    # Test that the duplicates are removed during migration from schema 23
    hass = get_test_home_assistant()
    hass.config.config_dir = tmpdir
    setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
    hass.start()
    wait_recording_done(hass)
    wait_recording_done(hass)
    hass.stop()

    assert "Deleted 2 duplicated statistics rows" in caplog.text
    assert "Deleted 1 non identical" in caplog.text
    assert "Found duplicated" not in caplog.text

    isotime = dt_util.utcnow().isoformat()
    backup_file_name = f".storage/deleted_statistics.{isotime}.json"

    with open(hass.config.path(backup_file_name)) as backup_file:
        backup = json.load(backup_file)

    assert backup == [
        {
            "duplicate": {
                "created": "2021-08-01T00:00:00",
                "id": 4,
                "last_reset": None,
                "max": None,
                "mean": None,
                "metadata_id": 1,
                "min": None,
                "start": "2021-10-31T23:00:00",
                "state": 3.0,
                "sum": 5.0,
            },
            "original": {
                "created": "2021-08-01T00:00:00",
                "id": 5,
                "last_reset": None,
                "max": None,
                "mean": None,
                "metadata_id": 1,
                "min": None,
                "start": "2021-10-31T23:00:00",
                "state": 3.0,
                "sum": 6.0,
            },
        }
    ]
Example #58
0
File: test_number.py  Project: jbouwh/core
async def test_numbers(
    hass: HomeAssistant,
    mock_fully_kiosk: MagicMock,
    init_integration: MockConfigEntry,
) -> None:
    """Test standard Fully Kiosk numbers."""
    entity_registry = er.async_get(hass)
    device_registry = dr.async_get(hass)

    state = hass.states.get("number.amazon_fire_screensaver_timer")
    assert state
    assert state.state == "900"
    entry = entity_registry.async_get("number.amazon_fire_screensaver_timer")
    assert entry
    assert entry.unique_id == "abcdef-123456-timeToScreensaverV2"
    await set_value(hass, "number.amazon_fire_screensaver_timer", 600)
    assert len(mock_fully_kiosk.setConfigurationString.mock_calls) == 1

    state = hass.states.get("number.amazon_fire_screensaver_brightness")
    assert state
    assert state.state == "0"
    entry = entity_registry.async_get(
        "number.amazon_fire_screensaver_brightness")
    assert entry
    assert entry.unique_id == "abcdef-123456-screensaverBrightness"

    state = hass.states.get("number.amazon_fire_screen_off_timer")
    assert state
    assert state.state == "0"
    entry = entity_registry.async_get("number.amazon_fire_screen_off_timer")
    assert entry
    assert entry.unique_id == "abcdef-123456-timeToScreenOffV2"

    state = hass.states.get("number.amazon_fire_screen_brightness")
    assert state
    assert state.state == "9"
    entry = entity_registry.async_get("number.amazon_fire_screen_brightness")
    assert entry
    assert entry.unique_id == "abcdef-123456-screenBrightness"

    # Test invalid numeric data
    mock_fully_kiosk.getSettings.return_value = {"screenBrightness": "invalid"}
    async_fire_time_changed(hass, dt.utcnow() + UPDATE_INTERVAL)
    await hass.async_block_till_done()

    state = hass.states.get("number.amazon_fire_screen_brightness")
    assert state
    assert state.state == STATE_UNKNOWN

    # Test unknown/missing data
    mock_fully_kiosk.getSettings.return_value = {}
    async_fire_time_changed(hass, dt.utcnow() + UPDATE_INTERVAL)
    await hass.async_block_till_done()

    state = hass.states.get("number.amazon_fire_screensaver_timer")
    assert state
    assert state.state == STATE_UNKNOWN

    assert entry.device_id
    device_entry = device_registry.async_get(entry.device_id)
    assert device_entry
    assert device_entry.configuration_url == "http://192.168.1.234:2323"
    assert device_entry.entry_type is None
    assert device_entry.hw_version is None
    assert device_entry.identifiers == {(DOMAIN, "abcdef-123456")}
    assert device_entry.manufacturer == "amzn"
    assert device_entry.model == "KFDOWI"
    assert device_entry.name == "Amazon Fire"
    assert device_entry.sw_version == "1.42.5"
Example #59
0
    def time_fired(self):
        """Return the UTC time the event was fired."""
        if not self._time_fired:
            self._time_fired = (process_timestamp(self._row.time_fired)
                                or dt_util.utcnow())
        return self._time_fired
Example #60
0
    def native_value(self) -> str:
        """Return the state of the sensor."""
        uptime = utcnow() - timedelta(
            seconds=self.coordinator.data.info.uptime)
        return uptime.replace(microsecond=0).isoformat()
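Dropping the microseconds keeps the computed boot time stable between polls, so the sensor state does not change on every refresh. A minimal sketch of that calculation with an illustrative uptime_seconds input:

from datetime import timedelta

from homeassistant.util.dt import utcnow


def uptime_to_boot_time(uptime_seconds: float) -> str:
    """Return the ISO-formatted boot time derived from an uptime counter."""
    boot_time = utcnow() - timedelta(seconds=uptime_seconds)
    # Truncate to whole seconds so repeated polls yield the same value.
    return boot_time.replace(microsecond=0).isoformat()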