async def test_include_filters(hass):
    """Test that inclusion filters only expose matching entities.

    The original was a bare generator function using ``yield from``;
    modern pytest no longer runs generator tests, so it is converted to
    ``async def``/``await``, matching the sibling async tests.
    """
    request = get_new_request('Alexa.Discovery', 'Discover')

    # setup test devices
    hass.states.async_set(
        'switch.deny', 'on', {'friendly_name': "Blocked switch"})

    hass.states.async_set(
        'script.deny', 'off', {'friendly_name': "Blocked script"})

    hass.states.async_set(
        'automation.allow', 'off', {'friendly_name': "Allowed automation"})

    hass.states.async_set(
        'group.allow', 'off', {'friendly_name': "Allowed group"})

    # Expose two whole domains plus one explicitly included entity.
    config = smart_home.Config(should_expose=entityfilter.generate_filter(
        include_domains=['automation', 'group'],
        include_entities=['script.deny'],
        exclude_domains=[],
        exclude_entities=[],
    ))

    msg = await smart_home.async_handle_message(hass, config, request)
    await hass.async_block_till_done()

    msg = msg['event']

    # script.deny, automation.allow and group.allow are discovered.
    assert len(msg['payload']['endpoints']) == 3
async def test_exclude_filters(hass):
    """Test exclusion filters."""
    discovery_request = get_new_request('Alexa.Discovery', 'Discover')

    # One entity survives the filter; the other two are excluded.
    hass.states.async_set(
        'switch.test', 'on', {'friendly_name': "Test switch"})
    hass.states.async_set(
        'script.deny', 'off', {'friendly_name': "Blocked script"})
    hass.states.async_set(
        'cover.deny', 'off', {'friendly_name': "Blocked cover"})

    entity_filter = entityfilter.generate_filter(
        include_domains=[],
        include_entities=[],
        exclude_domains=['script'],
        exclude_entities=['cover.deny'],
    )
    config = smart_home.Config(should_expose=entity_filter)

    response = await smart_home.async_handle_message(
        hass, config, discovery_request)
    await hass.async_block_till_done()

    # Only switch.test remains after exclusions are applied.
    assert len(response['event']['payload']['endpoints']) == 1
def test_no_filters_case_1():
    """If include and exclude are not configured, pass everything.

    NOTE: the original used ``{}`` for the empty arguments, which is an
    empty *dict*, not an empty set; empty lists make the intent
    unambiguous while behaving identically (both are empty iterables).
    """
    incl_dom = []
    incl_ent = []
    excl_dom = []
    excl_ent = []
    testfilter = generate_filter(incl_dom, incl_ent, excl_dom, excl_ent)

    # With no constraints, every entity id is allowed through.
    for value in ("sensor.test", "sun.sun", "light.test"):
        assert testfilter(value)
def test_includes_only_case_2():
    """If include specified, only pass if specified (Case 2)."""
    testfilter = generate_filter(
        {'light', 'sensor'},        # included domains
        {'binary_sensor.working'},  # included entities
        {},                         # excluded domains
        {},                         # excluded entities
    )

    # Entities in an included domain or listed explicitly pass.
    for entity_id in ("sensor.test", "light.test", "binary_sensor.working"):
        assert testfilter(entity_id)
    # Everything else is rejected.
    assert testfilter("binary_sensor.notworking") is False
    assert testfilter("sun.sun") is False
def test_excludes_only_case_3():
    """If exclude specified, pass all but specified (Case 3)."""
    testfilter = generate_filter(
        {},                         # included domains
        {},                         # included entities
        {'light', 'sensor'},        # excluded domains
        {'binary_sensor.working'},  # excluded entities
    )

    # Excluded domains and explicitly excluded entities are rejected.
    for entity_id in ("sensor.test", "light.test", "binary_sensor.working"):
        assert testfilter(entity_id) is False
    # Everything else passes.
    assert testfilter("binary_sensor.another")
    assert testfilter("sun.sun") is True
def async_setup(hass, config):
    """Set up the MQTT state feed.

    Mirrors every matching state change onto MQTT topics rooted at the
    configured base topic.  Returns True on success.
    """
    conf = config.get(DOMAIN, {})
    base_topic = conf.get(CONF_BASE_TOPIC)
    pub_include = conf.get(CONF_INCLUDE, {})
    pub_exclude = conf.get(CONF_EXCLUDE, {})
    publish_attributes = conf.get(CONF_PUBLISH_ATTRIBUTES)
    publish_timestamps = conf.get(CONF_PUBLISH_TIMESTAMPS)
    # Predicate deciding which entity_ids get published.
    publish_filter = generate_filter(pub_include.get(CONF_DOMAINS, []),
                                     pub_include.get(CONF_ENTITIES, []),
                                     pub_exclude.get(CONF_DOMAINS, []),
                                     pub_exclude.get(CONF_ENTITIES, []))
    # Normalize the base topic so the concatenations below are safe.
    # NOTE(review): assumes base_topic is always a string (presumably
    # schema-validated upstream); a missing value would raise here.
    if not base_topic.endswith('/'):
        base_topic = base_topic + '/'

    @callback
    def _state_publisher(entity_id, old_state, new_state):
        """Publish a changed state (and optional metadata) to MQTT."""
        if new_state is None:
            return

        if not publish_filter(entity_id):
            return

        payload = new_state.state

        # e.g. light.kitchen -> <base>/light/kitchen/
        mybase = base_topic + entity_id.replace('.', '/') + '/'
        hass.components.mqtt.async_publish(mybase + 'state', payload, 1, True)

        if publish_timestamps:
            if new_state.last_updated:
                hass.components.mqtt.async_publish(
                    mybase + 'last_updated',
                    new_state.last_updated.isoformat(),
                    1,
                    True)
            if new_state.last_changed:
                hass.components.mqtt.async_publish(
                    mybase + 'last_changed',
                    new_state.last_changed.isoformat(),
                    1,
                    True)

        if publish_attributes:
            # Only truthy attribute values are published, one topic each.
            for key, val in new_state.attributes.items():
                if val:
                    encoded_val = json.dumps(val, cls=JSONEncoder)
                    hass.components.mqtt.async_publish(mybase + key,
                                                       encoded_val, 1, True)

    # Subscribe to every state change in the system.
    async_track_state_change(hass, MATCH_ALL, _state_publisher)
    return True
def test_no_domain_case4c():
    """Test case 4c - include and exclude specified, with no domains."""
    testfilter = generate_filter(
        {},                                       # included domains
        {'binary_sensor.working'},                # included entities
        {},                                       # excluded domains
        {'light.ignoreme', 'sensor.notworking'},  # excluded entities
    )

    # Only the explicitly included entity passes; everything else fails.
    assert testfilter("binary_sensor.working")
    for entity_id in ("sensor.test", "sensor.notworking", "light.test",
                      "light.ignoreme", "binary_sensor.another", "sun.sun"):
        assert testfilter(entity_id) is False
# Example #8
    def test_homekit_entity_filter(self):
        """Test the entity filter."""
        entity_filter = generate_filter(['cover'], ['demo.test'], [], [])
        homekit = HomeKit(self.hass, None, entity_filter, {})

        with patch(PATH_HOMEKIT + '.get_accessory') as mock_get_acc:
            mock_get_acc.return_value = None

            # The included domain and the explicitly included entity both
            # pass the filter; the unrelated light does not.
            for state, should_pass in (
                    (State('cover.test', 'open'), True),
                    (State('demo.test', 'on'), True),
                    (State('light.demo', 'light'), False)):
                homekit.add_bridge_accessory(state)
                self.assertEqual(mock_get_acc.called, should_pass)
                mock_get_acc.reset_mock()
def _generate_filter_from_config(config):
    """Build an entity filter from a config's include/exclude sections."""
    from homeassistant.helpers.entityfilter import generate_filter

    # Missing or empty sections fall back to an empty mapping so the
    # per-key lookups below all default to empty lists.
    include = config.get(CONF_INCLUDE) or {}
    exclude = config.get(CONF_EXCLUDE) or {}

    return generate_filter(
        include.get(CONF_DOMAINS, []),
        include.get(CONF_ENTITIES, []),
        exclude.get(CONF_DOMAINS, []),
        exclude.get(CONF_ENTITIES, []),
    )
# Example #10
    def __init__(self, hass: HomeAssistant, keep_days: int,
                 purge_interval: int, uri: str,
                 include: Dict, exclude: Dict) -> None:
        """Initialize the recorder."""
        threading.Thread.__init__(self, name='Recorder')

        self.hass = hass
        self.keep_days = keep_days
        self.purge_interval = purge_interval
        # Work queue consumed by the recorder thread.
        self.queue = queue.Queue()  # type: Any
        self.recording_start = dt_util.utcnow()
        self.db_url = uri
        # Future on the hass event loop; presumably resolved once the
        # database connection is established (set elsewhere).
        self.async_db_ready = asyncio.Future(loop=hass.loop)
        self.engine = None  # type: Any
        self.run_info = None  # type: Any

        # Predicate deciding which entities are recorded.
        self.entity_filter = generate_filter(
            include.get(CONF_DOMAINS, []), include.get(CONF_ENTITIES, []),
            exclude.get(CONF_DOMAINS, []), exclude.get(CONF_ENTITIES, []))
        # Event types that are never recorded.
        self.exclude_t = exclude.get(CONF_EVENT_TYPES, [])

        self.get_session = None
# Example #11
    def __init__(self, hass: HomeAssistant, keep_days: int,
                 purge_interval: int, uri: str, include: Dict,
                 exclude: Dict) -> None:
        """Initialize the recorder."""
        threading.Thread.__init__(self, name='Recorder')

        self.hass = hass
        self.keep_days = keep_days
        self.purge_interval = purge_interval
        # Work queue consumed by the recorder thread.
        self.queue = queue.Queue()  # type: Any
        self.recording_start = dt_util.utcnow()
        self.db_url = uri
        # Future on the hass event loop; presumably resolved once the
        # database connection is established (set elsewhere).
        self.async_db_ready = asyncio.Future(loop=hass.loop)
        self.engine = None  # type: Any
        self.run_info = None  # type: Any

        # Predicate deciding which entities are recorded.
        self.entity_filter = generate_filter(include.get(CONF_DOMAINS, []),
                                             include.get(CONF_ENTITIES, []),
                                             exclude.get(CONF_DOMAINS, []),
                                             exclude.get(CONF_ENTITIES, []))
        # Event types that are never recorded.
        self.exclude_t = exclude.get(CONF_EVENT_TYPES, [])

        self.get_session = None
# Example #12
async def test_never_exposed_entities(hass):
    """Test never exposed locks do not get discovered."""
    discovery_request = get_new_request('Alexa.Discovery', 'Discover')

    # group.all_locks must never be exposed; group.allow may be.
    hass.states.async_set(
        'group.all_locks', 'on', {'friendly_name': "Blocked locks"})
    hass.states.async_set(
        'group.allow', 'off', {'friendly_name': "Allowed group"})

    config = smart_home.Config(should_expose=entityfilter.generate_filter(
        include_domains=['group'],
        include_entities=[],
        exclude_domains=[],
        exclude_entities=[],
    ))

    response = await smart_home.async_handle_message(
        hass, config, discovery_request)
    await hass.async_block_till_done()

    # Even though the whole group domain is included, only group.allow
    # is discovered.
    payload = response['event']['payload']
    assert len(payload['endpoints']) == 1
# Example #13
_LOGGER = logging.getLogger(__name__)

# Configuration keys for the cloud/Alexa integration.
CONF_ALEXA = 'alexa'
CONF_ALEXA_FILTER = 'filter'
CONF_COGNITO_CLIENT_ID = 'cognito_client_id'
CONF_RELAYER = 'relayer'
CONF_USER_POOL_ID = 'user_pool_id'

MODE_DEV = 'development'
DEFAULT_MODE = MODE_DEV
DEPENDENCIES = ['http']

# Alexa section schema.  The filter default is a callable so each
# config load gets a fresh pass-everything entity filter.
ALEXA_SCHEMA = vol.Schema({
    vol.Optional(CONF_ALEXA_FILTER,
                 default=lambda: entityfilter.generate_filter([], [], [], [])):
    entityfilter.FILTER_SCHEMA,
})

CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN:
        vol.Schema({
            vol.Optional(CONF_MODE, default=DEFAULT_MODE):
            vol.In([MODE_DEV] + list(SERVERS)),
            # Change to optional when we include real servers
            vol.Required(CONF_COGNITO_CLIENT_ID):
            str,
            vol.Required(CONF_USER_POOL_ID):
            str,
            vol.Required(CONF_REGION):
# Example #14
async def async_setup(hass, config):
    """Set up the MQTT state feed.

    Publishes local state changes and ``isy994_control`` events to MQTT
    and executes service calls received on the control topic, so two
    Home Assistant instances can share state over a broker.

    Changes from the original: converted the bare-generator coroutine
    (``yield from`` inside a plain ``def``) to ``async def``/``await``,
    removed the unused ``event_type`` local, and deleted dead
    commented-out code.
    """
    mqtt = hass.components.mqtt
    conf = config.get(DOMAIN, {})
    base_topic = conf.get(CONF_BASE_TOPIC)
    if not base_topic.endswith('/'):
        base_topic = base_topic + '/'
    event_topic = base_topic + 'event'
    control_topic = base_topic + 'control'
    pub_include = conf.get(CONF_INCLUDE, {})
    pub_exclude = conf.get(CONF_EXCLUDE, {})
    # Predicate deciding which entity_ids are shared over MQTT.
    publish_filter = generate_filter(pub_include.get(CONF_DOMAINS, []),
                                     pub_include.get(CONF_ENTITIES, []),
                                     pub_exclude.get(CONF_DOMAINS, []),
                                     pub_exclude.get(CONF_ENTITIES, []))

    @callback
    def _control_listener(topic, payload, qos):
        """Receive remote control events and replay them as service calls."""
        event = json.loads(payload)
        event_data = event.get('event_data')
        domain = event_data.get(ATTR_DOMAIN)
        service = event_data.get(ATTR_SERVICE)
        data = event_data.get(ATTR_SERVICE_DATA)
        hass.async_add_job(hass.services.async_call(domain, service, data))

    # subscribe to all control topics
    await mqtt.async_subscribe(control_topic, _control_listener)

    @callback
    def _state_publisher(entity_id, old_state, new_state):
        """Publish local states to mqtt_shareclient."""
        if new_state is None:
            return
        # do not publish entities not configured
        if not publish_filter(entity_id):
            return
        # state plus all attributes, serialized as one JSON document
        state = {"state": new_state.state}
        state.update(dict(new_state.attributes))
        payload = json.dumps(state, cls=JSONEncoder)
        # create topic from entity_id
        topic = base_topic + entity_id.replace('.', '/') + '/state'
        # publish the topic, retain should be on for state topics
        hass.components.mqtt.async_publish(topic, payload, 0, True)

    # asynchronous receive state changes
    async_track_state_change(hass, MATCH_ALL, _state_publisher)

    @callback
    def _event_publisher(event):
        """Publish local isy994_control events to mqtt_shareclient."""
        # we only publish local events
        if event.origin != EventOrigin.local:
            return
        # must be isy994_control event
        if event.event_type != 'isy994_control':
            return
        entity_id = event.data.get(ATTR_ENTITY_ID)
        # must be one of our entities
        if not publish_filter(entity_id):
            return
        event_info = {'event_type': event.event_type, 'event_data': event.data}
        payload = json.dumps(event_info, cls=JSONEncoder)
        # publish the topic, retain should be off for events
        hass.components.mqtt.async_publish(event_topic, payload, 0, False)

    # listen for local events if you are going to publish them.
    hass.bus.async_listen(MATCH_ALL, _event_publisher)

    return True
# Example #15
def _get_events(
    hass,
    start_day,
    end_day,
    entity_ids=None,
    filters=None,
    entities_filter=None,
    entity_matches_only=False,
    context_id=None,
):
    """Get events for a period of time.

    Builds a recorder query for logbook events between ``start_day`` and
    ``end_day`` and returns the humanified rows.  ``entity_ids`` and
    ``context_id`` are mutually exclusive.
    """
    assert not (entity_ids
                and context_id), "can't pass in both entity_ids and context_id"

    entity_attr_cache = EntityAttributeCache(hass)
    # Maps context_id -> first event seen with that context.
    context_lookup = {None: None}

    def yield_events(query):
        """Yield Events that are not filtered away."""
        for row in query.yield_per(1000):
            event = LazyEventPartialState(row)
            context_lookup.setdefault(event.context_id, event)
            # Service-call events are never shown in the logbook.
            if event.event_type == EVENT_CALL_SERVICE:
                continue
            # State changes always pass; other events go through _keep_event.
            if event.event_type == EVENT_STATE_CHANGED or _keep_event(
                    hass, event, entities_filter):
                yield event

    if entity_ids is not None:
        # Restrict the post-filter to exactly the requested entities.
        entities_filter = generate_filter([], entity_ids, [], [])

    with session_scope(hass=hass) as session:
        old_state = aliased(States, name="old_state")

        if entity_ids is not None:
            query = _generate_events_query_without_states(session)
            query = _apply_event_time_filter(query, start_day, end_day)
            query = _apply_event_types_filter(
                hass, query, ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED)
            if entity_matches_only:
                # When entity_matches_only is provided, contexts and events that do not
                # contain the entity_ids are not included in the logbook response.
                query = _apply_event_entity_id_matchers(query, entity_ids)

            query = query.union_all(
                _generate_states_query(session, start_day, end_day, old_state,
                                       entity_ids))
        else:
            query = _generate_events_query(session)
            query = _apply_event_time_filter(query, start_day, end_day)
            # Keep state rows only when last_updated == last_changed;
            # other event types always pass this clause.
            query = _apply_events_types_and_states_filter(
                hass, query,
                old_state).filter((States.last_updated == States.last_changed)
                                  | (Events.event_type != EVENT_STATE_CHANGED))
            if filters:
                query = query.filter(filters.entity_filter() | (
                    Events.event_type != EVENT_STATE_CHANGED))

            if context_id is not None:
                query = query.filter(Events.context_id == context_id)

        query = query.order_by(Events.time_fired)

        return list(
            humanify(hass, yield_events(query), entity_attr_cache,
                     context_lookup))
# Example #16
def _get_events(
    hass: HomeAssistant,
    start_day: dt,
    end_day: dt,
    entity_ids: list[str] | None = None,
    filters: Filters | None = None,
    entities_filter: EntityFilter | Callable[[str], bool] | None = None,
    entity_matches_only: bool = False,
    context_id: str | None = None,
) -> list[dict[str, Any]]:
    """Get events for a period of time.

    Builds a recorder query for logbook events between ``start_day`` and
    ``end_day`` and returns the humanified rows.  ``entity_ids`` and
    ``context_id`` are mutually exclusive.
    """
    assert not (entity_ids
                and context_id), "can't pass in both entity_ids and context_id"

    entity_name_cache = EntityNameCache(hass)
    event_data_cache: dict[str, dict[str, Any]] = {}
    # Maps context_id -> first row seen with that context.
    context_lookup: dict[str | None, Row | None] = {None: None}
    event_cache = EventCache(event_data_cache)
    external_events = hass.data.get(DOMAIN, {})
    context_augmenter = ContextAugmenter(context_lookup, entity_name_cache,
                                         external_events, event_cache)

    def yield_rows(query: Query) -> Generator[Row, None, None]:
        """Yield Events that are not filtered away."""
        for row in query.yield_per(1000):
            context_lookup.setdefault(row.context_id, row)
            # Service calls are skipped; state changes always pass, other
            # event types go through _keep_row.
            if row.event_type != EVENT_CALL_SERVICE and (
                    row.event_type == EVENT_STATE_CHANGED
                    or _keep_row(hass, row, entities_filter)):
                yield row

    if entity_ids is not None:
        # Restrict the post-filter to exactly the requested entities.
        entities_filter = generate_filter([], entity_ids, [], [])

    with session_scope(hass=hass) as session:
        old_state = aliased(States, name="old_state")
        query: Query
        query = _generate_events_query_without_states(session)
        query = _apply_event_time_filter(query, start_day, end_day)
        query = _apply_event_types_filter(
            hass, query, ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED)

        if entity_ids is not None:
            if entity_matches_only:
                # When entity_matches_only is provided, contexts and events that do not
                # contain the entity_ids are not included in the logbook response.
                query = _apply_event_entity_id_matchers(query, entity_ids)
            query = query.outerjoin(EventData,
                                    (Events.data_id == EventData.data_id))
            query = query.union_all(
                _generate_states_query(session, start_day, end_day, old_state,
                                       entity_ids))
        else:
            if context_id is not None:
                query = query.filter(Events.context_id == context_id)
            query = query.outerjoin(EventData,
                                    (Events.data_id == EventData.data_id))

            states_query = _generate_states_query(session, start_day, end_day,
                                                  old_state, entity_ids)
            unions: list[Query] = []
            if context_id is not None:
                # Once all the old `state_changed` events
                # are gone from the database remove the
                # _generate_legacy_events_context_id_query
                unions.append(
                    _generate_legacy_events_context_id_query(
                        session, context_id, start_day, end_day))
                states_query = states_query.outerjoin(
                    Events, (States.event_id == Events.event_id))
                states_query = states_query.filter(
                    States.context_id == context_id)
            elif filters:
                states_query = states_query.filter(
                    filters.entity_filter())  # type: ignore[no-untyped-call]
            unions.append(states_query)
            query = query.union_all(*unions)

        query = query.order_by(Events.time_fired)

        return list(
            _humanify(
                hass,
                yield_rows(query),
                entity_name_cache,
                event_cache,
                context_augmenter,
            ))
# Example #17
def _get_events(
    hass,
    start_day,
    end_day,
    entity_id=None,
    filters=None,
    entities_filter=None,
    entity_matches_only=False,
):
    """Get events for a period of time.

    Queries the recorder database for logbook events between
    ``start_day`` and ``end_day``, optionally restricted to a single
    ``entity_id``, and returns the humanified rows.

    Raises InvalidEntityFormatError when ``entity_id`` is malformed.
    """
    entity_attr_cache = EntityAttributeCache(hass)
    # Maps context_id -> first event seen with that context.
    context_lookup = {None: None}
    entity_id_lower = None
    apply_sql_entities_filter = True

    def yield_events(query):
        """Yield Events that are not filtered away."""
        for row in query.yield_per(1000):
            event = LazyEventPartialState(row)
            context_lookup.setdefault(event.context_id, event)
            if _keep_event(hass, event, entities_filter):
                yield event

    if entity_id is not None:
        entity_id_lower = entity_id.lower()
        if not valid_entity_id(entity_id_lower):
            raise InvalidEntityFormatError(
                f"Invalid entity id encountered: {entity_id_lower}. "
                "Format should be <domain>.<object_id>")
        # A single requested entity replaces any configured filters.
        entities_filter = generate_filter([], [entity_id_lower], [], [])
        apply_sql_entities_filter = False

    with session_scope(hass=hass) as session:
        old_state = aliased(States, name="old_state")

        query = (
            session.query(
                Events.event_type,
                Events.event_data,
                Events.time_fired,
                Events.context_id,
                Events.context_user_id,
                States.state,
                States.entity_id,
                States.domain,
                States.attributes,
            ).order_by(Events.time_fired).outerjoin(
                States, (Events.event_id == States.event_id)).outerjoin(
                    old_state, (States.old_state_id == old_state.state_id))
            # The below filter removes state change events that do not
            # have an old_state or new_state, or whose old and new
            # states are identical.
            #
            .filter((Events.event_type != EVENT_STATE_CHANGED)
                    | ((States.state_id.isnot(None))
                       & (old_state.state_id.isnot(None))
                       & (States.state.isnot(None))
                       & (States.state != old_state.state)))
            #
            # Prefilter out continuous domains that have
            # ATTR_UNIT_OF_MEASUREMENT as its much faster in sql.
            #
            .filter((Events.event_type != EVENT_STATE_CHANGED)
                    | sqlalchemy.not_(States.domain.in_(CONTINUOUS_DOMAINS))
                    | sqlalchemy.not_(
                        States.attributes.contains(UNIT_OF_MEASUREMENT_JSON))).
            filter(
                Events.event_type.in_(ALL_EVENT_TYPES +
                                      list(hass.data.get(DOMAIN, {})))).filter(
                                          (Events.time_fired > start_day)
                                          & (Events.time_fired < end_day)))

        if entity_id_lower is not None:
            if entity_matches_only:
                # When entity_matches_only is provided, contexts and events that do not
                # contain the entity_id are not included in the logbook response.
                entity_id_json = ENTITY_ID_JSON_TEMPLATE.format(
                    entity_id_lower)
                query = query.filter(
                    ((States.last_updated == States.last_changed)
                     & (States.entity_id == entity_id_lower))
                    | (States.state_id.is_(None)
                       & Events.event_data.contains(entity_id_json)))
            else:
                query = query.filter((
                    (States.last_updated == States.last_changed)
                    & (States.entity_id == entity_id_lower))
                                     | (States.state_id.is_(None)))
        else:
            # Keep state rows only when last_updated == last_changed;
            # rows without a joined state (non-state events) always pass.
            query = query.filter((States.last_updated == States.last_changed)
                                 | (States.state_id.is_(None)))

        if apply_sql_entities_filter and filters:
            entity_filter = filters.entity_filter()
            if entity_filter is not None:
                query = query.filter(entity_filter | (
                    Events.event_type != EVENT_STATE_CHANGED))

        return list(
            humanify(hass, yield_events(query), entity_attr_cache,
                     context_lookup))
def setup(hass, config):
    """Set up the MQTT state feed.

    Publishes selected entity states to MQTT as JSON payloads and
    applies commands received on ``<base>/<domain>/<entity>/set``
    topics back to Home Assistant services.
    """
    conf = config.get(DOMAIN, {})
    base_topic = conf.get(CONF_BASE_TOPIC)
    entity_include = conf.get(CONF_INCLUDE, {})
    entity_exclude = conf.get(CONF_EXCLUDE, {})
    # Predicate deciding which entity_ids are mirrored over MQTT.
    entity_filter = generate_filter(entity_include.get(CONF_DOMAINS, []),
                                    entity_include.get(CONF_ENTITIES, []),
                                    entity_exclude.get(CONF_DOMAINS, []),
                                    entity_exclude.get(CONF_ENTITIES, []))

    if not base_topic.endswith('/'):
        base_topic = base_topic + '/'

    def _publish_all_states(*_):
        """Re-publish the current state of every entity."""
        states = hass.states.all()
        for state in states:
            _state_publisher(state.entity_id, None, state)

    def _handle_hass_status(topic, payload, qos):
        """On an 'online' status message, resync all states 20s later."""
        if payload == 'online':
            async_call_later(hass, 20, _publish_all_states)

    def _state_publisher(entity_id, old_state, new_state):
        """Publish one entity's state as a domain-specific JSON payload."""
        if new_state is None:
            return

        if not entity_filter(entity_id):
            return

        entity_id_parts = entity_id.split('.')
        domain = entity_id_parts[0]
        entity_state = hass.states.get(entity_id)

        # e.g. light.kitchen -> <base>/light/kitchen/state
        mybase = base_topic + entity_id.replace('.', '/') + '/state'

        if domain == 'switch' or domain == 'binary_sensor':
            data = {}
            if new_state.state == 'on':
                data['state'] = 'ON'
            elif new_state.state == 'off':
                data['state'] = 'OFF'

            payload = json.dumps(data, cls=JSONEncoder)
            hass.components.mqtt.async_publish(mybase, payload, 1, False)
        elif domain == 'light':
            data = {}
            if new_state.state == 'on':
                data['state'] = 'ON'
            elif new_state.state == 'off':
                data['state'] = 'OFF'
            # Brightness is optional; omit it when the light has none.
            try:
                data['brightness'] = new_state.attributes['brightness']
            except KeyError:
                pass

            payload = json.dumps(data, cls=JSONEncoder)
            hass.components.mqtt.async_publish(mybase, payload, 1, False)
        elif domain == 'lock':
            data = {}
            if new_state.state == 'locked':
                data['state'] = 'LOCK'
            elif new_state.state == 'unlocked':
                data['state'] = 'UNLOCK'

            # NOTE(review): these read from hass.states.get() rather than
            # new_state — confirm that is intentional.
            try:
                data['notification'] = entity_state.attributes['notification']
            except KeyError:
                pass
            try:
                data['lock_status'] = entity_state.attributes['lock_status']
            except KeyError:
                pass

            payload = json.dumps(data, cls=JSONEncoder)
            hass.components.mqtt.async_publish(mybase, payload, 1, False)
        elif domain == 'sensor':
            data = {}
            data['state'] = new_state.state

            payload = json.dumps(data, cls=JSONEncoder)
            hass.components.mqtt.async_publish(mybase, payload, 1, False)

    def _state_message_received(topic, payload, qos):
        """Handle new MQTT state messages."""
        # Parse entity from topic
        topic_parts = topic.split('/')
        domain = topic_parts[-3]
        entity = topic_parts[-2]

        entity_id = "{0}.{1}".format(domain, entity)
        if not entity_filter(entity_id):
            return

        data = {ATTR_ENTITY_ID: entity_id}
        if domain == 'switch':
            if payload == mqtt_switch.DEFAULT_PAYLOAD_ON:
                hass.async_add_job(hass.services.async_call(domain, SERVICE_TURN_ON, data))
            if payload == mqtt_switch.DEFAULT_PAYLOAD_OFF:
                hass.async_add_job(hass.services.async_call(domain, SERVICE_TURN_OFF, data))
        elif domain == 'light':
            # Light commands arrive as JSON with state + optional brightness.
            payload = json.loads(payload)
            if payload['state'] == 'ON':
                try:
                    data['brightness'] = payload['brightness']
                except KeyError:
                    pass
                hass.async_add_job(hass.services.async_call(domain, SERVICE_TURN_ON, data))
            if payload['state'] == 'OFF':
                hass.async_add_job(hass.services.async_call(domain, SERVICE_TURN_OFF, data))
        elif domain == 'lock':
            if payload == mqtt_lock.DEFAULT_PAYLOAD_LOCK:
                hass.async_add_job(hass.services.async_call(domain, SERVICE_LOCK, data))
            if payload == mqtt_lock.DEFAULT_PAYLOAD_UNLOCK:
                hass.async_add_job(hass.services.async_call(domain, SERVICE_UNLOCK, data))

    # Publish every local state change; listen for set commands and for
    # status messages that trigger a full resync.
    async_track_state_change(hass, MATCH_ALL, _state_publisher)
    mqtt.subscribe(hass, base_topic+'+/+/set', _state_message_received)
    mqtt.subscribe(hass, 'hass/status', _handle_hass_status)
    return True
# Example #19
def _get_events(hass, config, start_day, end_day, entity_id=None):
    """Get events for a period of time.

    Queries the recorder database for events fired between *start_day*
    and *end_day*, filters the rows through an entity filter, and
    returns the humanified logbook entries as a list.

    When *entity_id* is given only that entity's events are kept;
    otherwise the include/exclude filter from *config* (or no filter at
    all) is applied.
    """
    entity_attr_cache = EntityAttributeCache(hass)

    def yield_events(query):
        """Yield Events that are not filtered away."""
        # NOTE: entities_filter is assigned below, before this generator
        # is first iterated inside humanify().
        for row in query.yield_per(1000):
            event = LazyEventPartialState(row)
            if _keep_event(hass, event, entities_filter, entity_attr_cache):
                yield event

    with session_scope(hass=hass) as session:
        if entity_id is not None:
            # Single-entity request: build a filter for exactly that entity.
            entity_ids = [entity_id.lower()]
            entities_filter = generate_filter([], entity_ids, [], [])
        elif config.get(CONF_EXCLUDE) or config.get(CONF_INCLUDE):
            # Honor the user-configured include/exclude filter and limit
            # the SQL query to entities that filter can match.
            entities_filter = convert_include_exclude_filter(config)
            entity_ids = _get_related_entity_ids(session, entities_filter)
        else:
            # No filter configured: everything passes.
            entities_filter = _all_entities_filter
            entity_ids = None

        old_state = aliased(States, name="old_state")

        query = (
            session.query(
                Events.event_type,
                Events.event_data,
                Events.time_fired,
                Events.context_user_id,
                States.state_id,
                States.state,
                States.entity_id,
                States.domain,
                States.attributes,
                old_state.state_id.label("old_state_id"),
            ).order_by(Events.time_fired).outerjoin(
                States, (Events.event_id == States.event_id)).outerjoin(
                    old_state, (States.old_state_id == old_state.state_id))
            # The filter below removes state-change events that do not have
            # an old_state or new_state, or whose old and new states are
            # the same, for v8 schema or later.
            #
            # If the events/states were stored before the v8 schema, we rely
            # on the prev_states dict to remove them.
            #
            # When all data is schema v8 or later, the check for
            # EMPTY_JSON_OBJECT can be removed.
            .filter((Events.event_type != EVENT_STATE_CHANGED)
                    | (Events.event_data != EMPTY_JSON_OBJECT)
                    | ((States.state_id.isnot(None))
                       & (old_state.state_id.isnot(None))
                       & (States.state != old_state.state))).filter(
                           Events.event_type.in_(
                               ALL_EVENT_TYPES +
                               list(hass.data.get(DOMAIN, {})))).filter(
                                   (Events.time_fired > start_day)
                                   & (Events.time_fired < end_day)))

        if entity_ids:
            # Keep rows for the wanted entities; non-state events have no
            # joined States row (state_id is NULL) and always pass.
            query = query.filter(((States.last_updated == States.last_changed)
                                  & States.entity_id.in_(entity_ids))
                                 | (States.state_id.is_(None)))
        else:
            # Drop attribute-only updates (last_updated != last_changed).
            query = query.filter((States.last_updated == States.last_changed)
                                 | (States.state_id.is_(None)))

        # When all data is schema v8 or later, prev_states can be removed
        prev_states = {}
        return list(
            humanify(hass, yield_events(query), entity_attr_cache,
                     prev_states))
# Example #20
# Per-entity configuration overrides for the Alexa smart-home integration.
ALEXA_ENTITY_SCHEMA = vol.Schema({
    vol.Optional(alexa_sh.CONF_DESCRIPTION): cv.string,
    vol.Optional(alexa_sh.CONF_DISPLAY_CATEGORIES): cv.string,
    vol.Optional(alexa_sh.CONF_NAME): cv.string,
})

# Per-entity configuration overrides for the Google Assistant integration.
GOOGLE_ENTITY_SCHEMA = vol.Schema({
    vol.Optional(CONF_NAME): cv.string,
    vol.Optional(CONF_TYPE): vol.In(ga_sh.MAPPING_COMPONENT),
    vol.Optional(CONF_ALIASES): vol.All(cv.ensure_list, [cv.string])
})

# Options shared by every assistant: an entity filter.  The default
# filter is built from four empty lists, which exposes every entity.
ASSISTANT_SCHEMA = vol.Schema({
    vol.Optional(
        CONF_FILTER,
        default=lambda: entityfilter.generate_filter([], [], [], [])
    ): entityfilter.FILTER_SCHEMA,
})

# Alexa config: shared assistant options plus Alexa entity overrides.
ALEXA_SCHEMA = ASSISTANT_SCHEMA.extend({
    vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: ALEXA_ENTITY_SCHEMA}
})

# Google Actions config: shared options plus Google entity overrides.
GACTIONS_SCHEMA = ASSISTANT_SCHEMA.extend({
    vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: GOOGLE_ENTITY_SCHEMA}
})

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Optional(CONF_MODE, default=DEFAULT_MODE):
            vol.In([MODE_DEV] + list(SERVERS)),
async def async_setup(hass, config):
    """Set up the MQTT state feed.

    Publishes local state changes and local isy994_control events to
    MQTT, and replays service calls received on the control topic.
    """
    mqtt = hass.components.mqtt
    conf = config.get(DOMAIN, {})

    # Normalize the base topic so it always ends in "/".
    base_topic = conf.get(CONF_BASE_TOPIC)
    if not base_topic.endswith("/"):
        base_topic += "/"
    event_topic = base_topic + "event"
    control_topic = base_topic + "control"

    # Build the entity filter from the include/exclude configuration.
    include_conf = conf.get(CONF_INCLUDE, {})
    exclude_conf = conf.get(CONF_EXCLUDE, {})
    publish_filter = generate_filter(
        include_conf.get(CONF_DOMAINS, []),
        include_conf.get(CONF_ENTITIES, []),
        exclude_conf.get(CONF_DOMAINS, []),
        exclude_conf.get(CONF_ENTITIES, []),
    )

    @callback
    def _control_listener(msg):
        """Receive remote control events from mqtt_share."""
        # The payload is a JSON-serialized event carrying a service call.
        event = json.loads(msg.payload)
        event_data = event.get("event_data")
        hass.async_add_job(hass.services.async_call(
            event_data.get(ATTR_DOMAIN),
            event_data.get(ATTR_SERVICE),
            event_data.get(ATTR_SERVICE_DATA)))
        _LOGGER.debug("Received remote %s event, data=%s",
                      event.get("event_type"), event_data)

    # subscribe to all control topics
    await mqtt.async_subscribe(control_topic, _control_listener)

    @callback
    def _state_publisher(entity_id, old_state, new_state):
        """Publish local states to mqtt_share via MQTT Server."""
        # Skip removals and entities the filter does not expose.
        if new_state is None or not publish_filter(entity_id):
            return
        # Serialize state plus every attribute into one JSON document.
        payload = json.dumps(
            {"state": new_state.state, **new_state.attributes},
            cls=JSONEncoder)
        # The topic mirrors the entity id; retain is on for state topics.
        topic = base_topic + entity_id.replace(".", "/") + "/state"
        hass.components.mqtt.async_publish(topic, payload, 0, True)
        _LOGGER.debug("Published state for '%s', state=%s", entity_id, payload)

    # asynchronous receive state changes
    async_track_state_change(hass, MATCH_ALL, _state_publisher)

    @callback
    def _event_publisher(event):
        """Publish local isy994_control events to mqtt_share via MQTT."""
        # Only forward locally originated isy994_control events for
        # entities the publish filter exposes.
        if event.origin != EventOrigin.local:
            return
        if event.event_type != "isy994_control":
            return
        entity_id = event.data.get(ATTR_ENTITY_ID)
        if not publish_filter(entity_id):
            return
        payload = json.dumps(
            {"event_type": event.event_type, "event_data": event.data},
            cls=JSONEncoder)
        # Retain is off for event topics.
        hass.components.mqtt.async_publish(event_topic, payload, 0, False)
        _LOGGER.debug("Publish local event '%s' data=%s", event.event_type,
                      event.data)

    # listen for local events if you are going to publish them.
    hass.bus.async_listen(MATCH_ALL, _event_publisher)

    return True
# Example #22
def _get_events(hass,
                config,
                start_day,
                end_day,
                entity_id=None,
                filters=None,
                entities_filter=None):
    """Get events for a period of time.

    Queries the recorder database for events fired between *start_day*
    and *end_day* and returns the humanified logbook entries as a list.

    When *entity_id* is given, only that entity's events are returned
    and the SQL-level entity filter is skipped; otherwise the provided
    *filters* / *entities_filter* pair is applied.
    """
    entity_attr_cache = EntityAttributeCache(hass)

    def yield_events(query):
        """Yield Events that are not filtered away."""
        for row in query.yield_per(1000):
            event = LazyEventPartialState(row)
            if _keep_event(hass, event, entities_filter):
                yield event

    with session_scope(hass=hass) as session:
        if entity_id is not None:
            # Single-entity request: build a dedicated filter for it and
            # bypass the SQL-level entities filter below.
            entity_ids = [entity_id.lower()]
            entities_filter = generate_filter([], entity_ids, [], [])
            apply_sql_entities_filter = False
        else:
            entity_ids = None
            apply_sql_entities_filter = True

        old_state = aliased(States, name="old_state")

        query = (
            session.query(
                Events.event_type,
                Events.event_data,
                Events.time_fired,
                Events.context_user_id,
                States.state,
                States.entity_id,
                States.domain,
                States.attributes,
            ).order_by(Events.time_fired).outerjoin(
                States, (Events.event_id == States.event_id)).outerjoin(
                    old_state, (States.old_state_id == old_state.state_id))
            # The filter below removes state-change events that do not have
            # an old_state or new_state, or whose old and new states are
            # the same, for v8 schema or later.
            #
            # If the events/states were stored before the v8 schema, we rely
            # on the prev_states dict to remove them.
            #
            # When all data is schema v8 or later, the check for
            # EMPTY_JSON_OBJECT can be removed.
            .filter((Events.event_type != EVENT_STATE_CHANGED)
                    | (Events.event_data != EMPTY_JSON_OBJECT)
                    | ((States.state_id.isnot(None))
                       & (old_state.state_id.isnot(None))
                       & (States.state.isnot(None))
                       & (States.state != old_state.state)))
            #
            # Prefilter out continuous domains that have
            # ATTR_UNIT_OF_MEASUREMENT as its much faster in sql.
            #
            .filter((Events.event_type != EVENT_STATE_CHANGED)
                    | sqlalchemy.not_(States.domain.in_(CONTINUOUS_DOMAINS))
                    | sqlalchemy.not_(
                        States.attributes.contains(UNIT_OF_MEASUREMENT_JSON))).
            filter(
                Events.event_type.in_(ALL_EVENT_TYPES +
                                      list(hass.data.get(DOMAIN, {})))).filter(
                                          (Events.time_fired > start_day)
                                          & (Events.time_fired < end_day)))

        if entity_ids:
            # Keep rows for the wanted entities; non-state events have no
            # joined States row (state_id is NULL) and always pass.
            query = query.filter(((States.last_updated == States.last_changed)
                                  & States.entity_id.in_(entity_ids))
                                 | (States.state_id.is_(None)))
        else:
            # Drop attribute-only updates (last_updated != last_changed).
            query = query.filter((States.last_updated == States.last_changed)
                                 | (States.state_id.is_(None)))

        if apply_sql_entities_filter and filters:
            # Push the configured include/exclude filter down into SQL
            # for state-change events; other event types always pass.
            entity_filter = filters.entity_filter()
            if entity_filter is not None:
                query = query.filter(entity_filter | (
                    Events.event_type != EVENT_STATE_CHANGED))

        # When all data is schema v8 or later, prev_states can be removed
        prev_states = {}
        return list(
            humanify(hass, yield_events(query), entity_attr_cache,
                     prev_states))