Example #1
 def filter_evts(events):
     return filter_events_for_client(
         self.store,
         user.to_string(),
         events,
         is_peeking=is_guest
     )
Example #2
 def filter_evts(events):
     return filter_events_for_client(
         self.store,
         user.to_string(),
         events,
         is_peeking=is_guest
     )
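Below is a minimal, self-contained sketch (not Synapse code) of the call shape used in the two snippets above: the helper closes over the store, the requesting user and the peeking flag, and forwards a list of events to filter_events_for_client. The stub filter and the "visible" flag are hypothetical and only mimic the real visibility check.

def filter_events_for_client(store, user_id, events, is_peeking=False):
    # Stand-in for synapse.visibility.filter_events_for_client: return only the
    # events the requesting user is allowed to see.
    return [ev for ev in events if ev.get("visible", True)]


class Handler:
    def __init__(self, store):
        self.store = store

    def filter_evts(self, events, user_id, is_guest=False):
        return filter_events_for_client(
            self.store, user_id, events, is_peeking=is_guest
        )


events = [{"event_id": "$a", "visible": True}, {"event_id": "$b", "visible": False}]
print(Handler(store=None).filter_evts(events, "@alice:example.org"))  # keeps only $a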
Example #3
    def get_event(self, user, room_id, event_id):
        """Retrieve a single specified event.

        Args:
            user (synapse.types.UserID): The user requesting the event
            room_id (str|None): The expected room id. We'll return None if the
                event's room does not match.
            event_id (str): The event ID to obtain.
        Returns:
            dict: An event, or None if there is no event matching this ID.
        Raises:
            SynapseError if there was a problem retrieving this event, or
            AuthError if the user does not have the rights to inspect this
            event.
        """
        event = yield self.store.get_event(event_id, check_room_id=room_id)

        if not event:
            defer.returnValue(None)
            return

        users = yield self.store.get_users_in_room(event.room_id)
        is_peeking = user.to_string() not in users

        filtered = yield filter_events_for_client(self.store,
                                                  user.to_string(), [event],
                                                  is_peeking=is_peeking)

        if not filtered:
            raise AuthError(403,
                            "You don't have permission to access that event.")

        defer.returnValue(event)
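The yield / defer.returnValue style in the example above (and in several of the older snippets below) assumes the method is decorated with Twisted's @defer.inlineCallbacks, which the extracts do not show. A minimal runnable sketch of that pattern, with a hypothetical in-memory store standing in for self.store:

from twisted.internet import defer


class FakeStore:
    # Hypothetical stand-in for the Synapse datastore used in the snippet above.
    def get_event(self, event_id, check_room_id=None):
        return defer.succeed({"event_id": event_id, "room_id": check_room_id})


@defer.inlineCallbacks
def get_event(store, event_id, room_id=None):
    event = yield store.get_event(event_id, check_room_id=room_id)
    if not event:
        defer.returnValue(None)
    defer.returnValue(event)


d = get_event(FakeStore(), "$abc", room_id="!room:example.org")
d.addCallback(print)  # fires synchronously because FakeStore returns fired Deferreds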
Example #4
    def get_notif_vars(self, notif, user_id, notif_event, room_state_ids):
        results = yield self.store.get_events_around(
            notif["room_id"],
            notif["event_id"],
            before_limit=CONTEXT_BEFORE,
            after_limit=CONTEXT_AFTER,
        )

        ret = {
            "link": self.make_notif_link(notif),
            "ts": notif["received_ts"],
            "messages": [],
        }

        the_events = yield filter_events_for_client(
            self.storage, user_id, results["events_before"]
        )
        the_events.append(notif_event)

        for event in the_events:
            messagevars = yield self.get_message_vars(notif, event, room_state_ids)
            if messagevars is not None:
                ret["messages"].append(messagevars)

        return ret
Example #5
    def _room_initial_sync_parted(self, user_id, room_id, pagin_config,
                                  membership, member_event_id, is_peeking):
        room_state = yield self.store.get_state_for_events([member_event_id], )

        room_state = room_state[member_event_id]

        limit = pagin_config.limit if pagin_config else None
        if limit is None:
            limit = 10

        stream_token = yield self.store.get_stream_token_for_event(
            member_event_id)

        messages, token = yield self.store.get_recent_events_for_room(
            room_id, limit=limit, end_token=stream_token)

        messages = yield filter_events_for_client(self.store,
                                                  user_id,
                                                  messages,
                                                  is_peeking=is_peeking)

        start_token = StreamToken.START.copy_and_replace("room_key", token)
        end_token = StreamToken.START.copy_and_replace("room_key",
                                                       stream_token)

        time_now = self.clock.time_msec()

        defer.returnValue({
            "membership":
            membership,
            "room_id":
            room_id,
            "messages": {
                "chunk": (yield self._event_serializer.serialize_events(
                    messages,
                    time_now,
                )),
                "start":
                start_token.to_string(),
                "end":
                end_token.to_string(),
            },
            "state": (yield self._event_serializer.serialize_events(
                room_state.values(),
                time_now,
            )),
            "presence": [],
            "receipts": [],
        })
Example #6
        def check_for_updates(before_token, after_token):
            if not after_token.is_after(before_token):
                defer.returnValue(
                    EventStreamResult([], (from_token, from_token)))

            events = []
            end_token = from_token

            for name, source in self.event_sources.sources.items():
                keyname = "%s_key" % name
                before_id = getattr(before_token, keyname)
                after_id = getattr(after_token, keyname)
                if before_id == after_id:
                    continue
                if only_keys and name not in only_keys:
                    continue

                new_events, new_key = yield source.get_new_events(
                    user=user,
                    from_key=getattr(from_token, keyname),
                    limit=limit,
                    is_guest=is_peeking,
                    room_ids=room_ids,
                    explicit_room_id=explicit_room_id,
                )

                if name == "room":
                    new_events = yield filter_events_for_client(
                        self.store,
                        user.to_string(),
                        new_events,
                        is_peeking=is_peeking,
                    )
                elif name == "presence":
                    now = self.clock.time_msec()
                    new_events[:] = [{
                        "type":
                        "m.presence",
                        "content":
                        format_user_presence_state(event, now),
                    } for event in new_events]

                events.extend(new_events)
                end_token = end_token.copy_and_replace(keyname, new_key)

            defer.returnValue(
                EventStreamResult(events, (from_token, end_token)))
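The snippet above walks every registered event source and tracks per-source progress through attributes named "<source>_key" on the stream token. A hedged, self-contained sketch of that bookkeeping follows; the StreamToken namedtuple and copy_and_replace helper are illustrative, not Synapse's own classes.

from collections import namedtuple

StreamToken = namedtuple("StreamToken", ["room_key", "presence_key"])


def copy_and_replace(token, keyname, value):
    # Illustrative equivalent of StreamToken.copy_and_replace.
    return token._replace(**{keyname: value})


before = StreamToken(room_key=10, presence_key=3)
after = StreamToken(room_key=12, presence_key=3)

end_token = before
for name in ("room", "presence"):
    keyname = "%s_key" % name
    if getattr(before, keyname) == getattr(after, keyname):
        continue  # this source has nothing new
    # In the real handler this is where source.get_new_events is yielded and,
    # for the "room" source, filter_events_for_client is applied to the results.
    end_token = copy_and_replace(end_token, keyname, getattr(after, keyname))

print(end_token)  # StreamToken(room_key=12, presence_key=3)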
Example #7
    def _room_initial_sync_parted(self, user_id, room_id, pagin_config,
                                  membership, member_event_id, is_peeking):
        room_state = yield self.store.get_state_for_events(
            [member_event_id],
        )

        room_state = room_state[member_event_id]

        limit = pagin_config.limit if pagin_config else None
        if limit is None:
            limit = 10

        stream_token = yield self.store.get_stream_token_for_event(
            member_event_id
        )

        messages, token = yield self.store.get_recent_events_for_room(
            room_id,
            limit=limit,
            end_token=stream_token
        )

        messages = yield filter_events_for_client(
            self.store, user_id, messages, is_peeking=is_peeking
        )

        start_token = StreamToken.START.copy_and_replace("room_key", token)
        end_token = StreamToken.START.copy_and_replace("room_key", stream_token)

        time_now = self.clock.time_msec()

        defer.returnValue({
            "membership": membership,
            "room_id": room_id,
            "messages": {
                "chunk": (yield self._event_serializer.serialize_events(
                    messages, time_now,
                )),
                "start": start_token.to_string(),
                "end": end_token.to_string(),
            },
            "state": (yield self._event_serializer.serialize_events(
                room_state.values(), time_now,
            )),
            "presence": [],
            "receipts": [],
        })
Example #8
        def check_for_updates(before_token, after_token):
            if not after_token.is_after(before_token):
                defer.returnValue(EventStreamResult([], (from_token, from_token)))

            events = []
            end_token = from_token

            for name, source in self.event_sources.sources.items():
                keyname = "%s_key" % name
                before_id = getattr(before_token, keyname)
                after_id = getattr(after_token, keyname)
                if before_id == after_id:
                    continue
                if only_keys and name not in only_keys:
                    continue

                new_events, new_key = yield source.get_new_events(
                    user=user,
                    from_key=getattr(from_token, keyname),
                    limit=limit,
                    is_guest=is_peeking,
                    room_ids=room_ids,
                    explicit_room_id=explicit_room_id,
                )

                if name == "room":
                    new_events = yield filter_events_for_client(
                        self.store,
                        user.to_string(),
                        new_events,
                        is_peeking=is_peeking,
                    )
                elif name == "presence":
                    now = self.clock.time_msec()
                    new_events[:] = [
                        {
                            "type": "m.presence",
                            "content": format_user_presence_state(event, now),
                        }
                        for event in new_events
                    ]

                events.extend(new_events)
                end_token = end_token.copy_and_replace(keyname, new_key)

            defer.returnValue(EventStreamResult(events, (from_token, end_token)))
Example #9
    def test_visibility(self):
        """Tests that synapse.visibility.filter_events_for_client correctly filters out
        outdated events
        """
        store = self.hs.get_datastore()
        storage = self.hs.get_storage()
        room_id = self.helper.create_room_as(self.user_id, tok=self.token)
        events = []

        # Send a first event, which should be filtered out at the end of the test.
        resp = self.helper.send(room_id=room_id, body="1", tok=self.token)

        # Get the event from the store so that we end up with a FrozenEvent that we can
        # give to filter_events_for_client. We need to do this now because the event won't
        # be in the database anymore after it has expired.
        events.append(self.get_success(store.get_event(resp.get("event_id"))))

        # Advance the time by 2 days. We're using the default retention policy, therefore
        # after this the first event will still be valid.
        self.reactor.advance(one_day_ms * 2 / 1000)

        # Send another event, which shouldn't get filtered out.
        resp = self.helper.send(room_id=room_id, body="2", tok=self.token)

        valid_event_id = resp.get("event_id")

        events.append(self.get_success(store.get_event(valid_event_id)))

        # Advance the time by another 2 days. After this, the first event should be
        # outdated but not the second one.
        self.reactor.advance(one_day_ms * 2 / 1000)

        # Run filter_events_for_client with our list of FrozenEvents.
        filtered_events = self.get_success(
            filter_events_for_client(storage, self.user_id, events))

        # We should only get one event back.
        self.assertEqual(len(filtered_events), 1, filtered_events)
        # That event should be the second, not outdated event.
        self.assertEqual(filtered_events[0].event_id, valid_event_id,
                         filtered_events)
Example #10
    def test_visibility(self) -> None:
        """Tests that synapse.visibility.filter_events_for_client correctly filters out
        outdated events, even if the purge job hasn't got to them yet.

        We do this by setting a very long time between purge jobs.
        """
        store = self.hs.get_datastores().main
        storage_controllers = self.hs.get_storage_controllers()
        room_id = self.helper.create_room_as(self.user_id, tok=self.token)

        # Send a first event, which should be filtered out at the end of the test.
        resp = self.helper.send(room_id=room_id, body="1", tok=self.token)
        first_event_id = resp.get("event_id")

        # Advance the time by 2 days. We're using the default retention policy, therefore
        # after this the first event will still be valid.
        self.reactor.advance(one_day_ms * 2 / 1000)

        # Send another event, which shouldn't get filtered out.
        resp = self.helper.send(room_id=room_id, body="2", tok=self.token)
        valid_event_id = resp.get("event_id")

        # Advance the time by another 2 days. After this, the first event should be
        # outdated but not the second one.
        self.reactor.advance(one_day_ms * 2 / 1000)

        # Fetch the events, and run filter_events_for_client on them
        events = self.get_success(
            store.get_events_as_list([first_event_id, valid_event_id]))
        self.assertEqual(2, len(events), "events retrieved from database")
        filtered_events = self.get_success(
            filter_events_for_client(storage_controllers, self.user_id,
                                     events))

        # We should only get one event back.
        self.assertEqual(len(filtered_events), 1, filtered_events)
        # That event should be the second, not outdated event.
        self.assertEqual(filtered_events[0].event_id, valid_event_id,
                         filtered_events)
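Both retention tests above use one_day_ms without defining it in the extract; presumably it is a module-level constant along the lines below. The division by 1000 converts milliseconds to the seconds that reactor.advance expects.

one_day_ms = 24 * 60 * 60 * 1000  # assumed module-level constant, not shown in the extract

# Advancing the fake reactor by two days:
print(one_day_ms * 2 / 1000)  # 172800.0 seconds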
Example #11
        def check_for_updates(before_token, after_token):
            if not after_token.is_after(before_token):
                defer.returnValue(
                    EventStreamResult([], (from_token, from_token)))

            events = []
            end_token = from_token

            for name, source in self.event_sources.sources.items():
                keyname = "%s_key" % name
                before_id = getattr(before_token, keyname)
                after_id = getattr(after_token, keyname)
                if before_id == after_id:
                    continue
                if only_keys and name not in only_keys:
                    continue

                new_events, new_key = yield source.get_new_events(
                    user=user,
                    from_key=getattr(from_token, keyname),
                    limit=limit,
                    is_guest=is_peeking,
                    room_ids=room_ids,
                )

                if name == "room":
                    new_events = yield filter_events_for_client(
                        self.store,
                        user.to_string(),
                        new_events,
                        is_peeking=is_peeking,
                    )

                events.extend(new_events)
                end_token = end_token.copy_and_replace(keyname, new_key)

            defer.returnValue(
                EventStreamResult(events, (from_token, end_token)))
Example #12
    def get_event(self, user, room_id, event_id):
        """Retrieve a single specified event.

        Args:
            user (synapse.types.UserID): The user requesting the event
            room_id (str|None): The expected room id. We'll return None if the
                event's room does not match.
            event_id (str): The event ID to obtain.
        Returns:
            dict: An event, or None if there is no event matching this ID.
        Raises:
            SynapseError if there was a problem retrieving this event, or
            AuthError if the user does not have the rights to inspect this
            event.
        """
        event = yield self.store.get_event(event_id, check_room_id=room_id)

        if not event:
            defer.returnValue(None)
            return

        users = yield self.store.get_users_in_room(event.room_id)
        is_peeking = user.to_string() not in users

        filtered = yield filter_events_for_client(
            self.store,
            user.to_string(),
            [event],
            is_peeking=is_peeking
        )

        if not filtered:
            raise AuthError(
                403,
                "You don't have permission to access that event."
            )

        defer.returnValue(event)
Example #13
    def get_notif_vars(self, notif, user_id, notif_event, room_state):
        results = yield self.store.get_events_around(
            notif['room_id'], notif['event_id'],
            before_limit=CONTEXT_BEFORE, after_limit=CONTEXT_AFTER
        )

        ret = {
            "link": self.make_notif_link(notif),
            "ts": notif['received_ts'],
            "messages": [],
        }

        the_events = yield filter_events_for_client(
            self.store, user_id, results["events_before"]
        )
        the_events.append(notif_event)

        for event in the_events:
            messagevars = self.get_message_vars(notif, event, room_state)
            if messagevars is not None:
                ret['messages'].append(messagevars)

        defer.returnValue(ret)
Example #14
        def check_for_updates(before_token, after_token):
            if not after_token.is_after(before_token):
                defer.returnValue(EventStreamResult([], (from_token, from_token)))

            events = []
            end_token = from_token

            for name, source in self.event_sources.sources.items():
                keyname = "%s_key" % name
                before_id = getattr(before_token, keyname)
                after_id = getattr(after_token, keyname)
                if before_id == after_id:
                    continue
                if only_keys and name not in only_keys:
                    continue

                new_events, new_key = yield source.get_new_events(
                    user=user,
                    from_key=getattr(from_token, keyname),
                    limit=limit,
                    is_guest=is_peeking,
                    room_ids=room_ids,
                )

                if name == "room":
                    new_events = yield filter_events_for_client(
                        self.store,
                        user.to_string(),
                        new_events,
                        is_peeking=is_peeking,
                    )

                events.extend(new_events)
                end_token = end_token.copy_and_replace(keyname, new_key)

            defer.returnValue(EventStreamResult(events, (from_token, end_token)))
Example #15
    def _load_filtered_recents(self,
                               room_id,
                               sync_config,
                               now_token,
                               since_token=None,
                               recents=None,
                               newly_joined_room=False):
        """
        Returns:
            a Deferred TimelineBatch
        """
        with Measure(self.clock, "load_filtered_recents"):
            timeline_limit = sync_config.filter_collection.timeline_limit()

            if recents is None or newly_joined_room or timeline_limit < len(
                    recents):
                limited = True
            else:
                limited = False

            if recents:
                recents = sync_config.filter_collection.filter_room_timeline(
                    recents)
                recents = yield filter_events_for_client(
                    self.store,
                    sync_config.user.to_string(),
                    recents,
                )
            else:
                recents = []

            if not limited:
                defer.returnValue(
                    TimelineBatch(events=recents,
                                  prev_batch=now_token,
                                  limited=False))

            filtering_factor = 2
            load_limit = max(timeline_limit * filtering_factor, 10)
            max_repeat = 5  # Only try a few times per room, otherwise
            room_key = now_token.room_key
            end_key = room_key

            since_key = None
            if since_token and not newly_joined_room:
                since_key = since_token.room_key

            while limited and len(recents) < timeline_limit and max_repeat:
                events, end_key = yield self.store.get_room_events_stream_for_room(
                    room_id,
                    limit=load_limit + 1,
                    from_key=since_key,
                    to_key=end_key,
                )
                loaded_recents = sync_config.filter_collection.filter_room_timeline(
                    events)
                loaded_recents = yield filter_events_for_client(
                    self.store,
                    sync_config.user.to_string(),
                    loaded_recents,
                )
                loaded_recents.extend(recents)
                recents = loaded_recents

                if len(events) <= load_limit:
                    limited = False
                    break
                max_repeat -= 1

            if len(recents) > timeline_limit:
                limited = True
                recents = recents[-timeline_limit:]
                room_key = recents[0].internal_metadata.before

            prev_batch_token = now_token.copy_and_replace("room_key", room_key)

        defer.returnValue(
            TimelineBatch(events=recents,
                          prev_batch=prev_batch_token,
                          limited=limited or newly_joined_room))
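A hedged distillation of the paging strategy in _load_filtered_recents above: load more events than the timeline limit (visibility filtering may drop some), retry a bounded number of times, and stop once the store returns fewer rows than asked for. The fetch and visible callables are hypothetical stand-ins for the store query and filter_events_for_client.

def load_filtered(fetch, visible, timeline_limit):
    # fetch(limit, end_key) -> (candidate events oldest-first, new end_key)
    # visible(ev) -> bool
    filtering_factor = 2
    load_limit = max(timeline_limit * filtering_factor, 10)
    recents = []
    limited = True
    max_repeat = 5  # bound the number of round trips per room
    end_key = None
    while limited and len(recents) < timeline_limit and max_repeat:
        events, end_key = fetch(load_limit + 1, end_key)
        recents = [e for e in events if visible(e)] + recents
        if len(events) <= load_limit:
            limited = False  # the store ran out of rows
            break
        max_repeat -= 1
    return recents[-timeline_limit:], limited


# Example: a fake store holding events 0..99, paged backwards from the end.
def fake_fetch(limit, end_key):
    end = 100 if end_key is None else end_key
    start = max(end - limit, 0)
    return list(range(start, end)), start


print(load_filtered(fake_fetch, lambda e: e % 7 == 0, timeline_limit=3))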
Example #16
    def get_messages(self, requester, room_id=None, pagin_config=None,
                     as_client_event=True):
        """Get messages in a room.

        Args:
            requester (Requester): The user requesting messages.
            room_id (str): The room they want messages from.
            pagin_config (synapse.api.streams.PaginationConfig): The pagination
                config rules to apply, if any.
            as_client_event (bool): True to get events in client-server format.
        Returns:
            dict: Pagination API results
        """
        user_id = requester.user.to_string()
        data_source = self.hs.get_event_sources().sources["room"]

        if pagin_config.from_token:
            room_token = pagin_config.from_token.room_key
        else:
            pagin_config.from_token = (
                yield self.hs.get_event_sources().get_current_token(
                    direction='b'
                )
            )
            room_token = pagin_config.from_token.room_key

        room_token = RoomStreamToken.parse(room_token)

        pagin_config.from_token = pagin_config.from_token.copy_and_replace(
            "room_key", str(room_token)
        )

        source_config = pagin_config.get_source_config("room")

        membership, member_event_id = yield self._check_in_room_or_world_readable(
            room_id, user_id
        )

        if source_config.direction == 'b':
            # if we're going backwards, we might need to backfill. This
            # requires that we have a topo token.
            if room_token.topological:
                max_topo = room_token.topological
            else:
                max_topo = yield self.store.get_max_topological_token_for_stream_and_room(
                    room_id, room_token.stream
                )

            if membership == Membership.LEAVE:
                # If they have left the room then clamp the token to be before
                # they left the room, to save the effort of loading from the
                # database.
                leave_token = yield self.store.get_topological_token_for_event(
                    member_event_id
                )
                leave_token = RoomStreamToken.parse(leave_token)
                if leave_token.topological < max_topo:
                    source_config.from_key = str(leave_token)

            yield self.hs.get_handlers().federation_handler.maybe_backfill(
                room_id, max_topo
            )

        events, next_key = yield data_source.get_pagination_rows(
            requester.user, source_config, room_id
        )

        next_token = pagin_config.from_token.copy_and_replace(
            "room_key", next_key
        )

        if not events:
            defer.returnValue({
                "chunk": [],
                "start": pagin_config.from_token.to_string(),
                "end": next_token.to_string(),
            })

        events = yield filter_events_for_client(
            self.store,
            user_id,
            events,
            is_peeking=(member_event_id is None),
        )

        time_now = self.clock.time_msec()

        chunk = {
            "chunk": [
                serialize_event(e, time_now, as_client_event)
                for e in events
            ],
            "start": pagin_config.from_token.to_string(),
            "end": next_token.to_string(),
        }

        defer.returnValue(chunk)
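For reference, a hedged sketch of the pagination result shape built at the end of get_messages above; the event dict and the token strings are illustrative only.

chunk = {
    "chunk": [
        # serialize_event(...) output: plain dicts in client-server format
        {"type": "m.room.message", "event_id": "$e1", "content": {"body": "hi"}},
    ],
    "start": "t10-3_0_0_0",  # pagin_config.from_token.to_string()
    "end": "t8-2_0_0_0",     # next_token.to_string()
}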
Example #17
    def get_state_events(
            self,
            user_id,
            room_id,
            state_filter=StateFilter.all(),
            at_token=None,
            is_guest=False,
    ):
        """Retrieve all state events for a given room. If the user is
        joined to the room then return the current state. If the user has
        left the room return the state events from when they left. If an explicit
        'at' parameter is passed, return the state events as of that event, if
        visible.

        Args:
            user_id(str): The user requesting state events.
            room_id(str): The room ID to get all state events from.
            state_filter (StateFilter): The state filter used to fetch state
                from the database.
            at_token(StreamToken|None): the stream token of the point at which we are
                requesting the state. If the user is not allowed to view the state as of that
                stream token, we raise a 403 SynapseError. If None, returns the current
                state based on the current_state_events table.
            is_guest(bool): whether this user is a guest
        Returns:
            A list of dicts representing state events. [{}, {}, {}]
        Raises:
            NotFoundError (404) if the at token does not yield an event

            AuthError (403) if the user doesn't have permission to view
            members of this room.
        """
        if at_token:
            # FIXME this claims to get the state at a stream position, but
            # get_recent_events_for_room operates by topo ordering. This therefore
            # does not reliably give you the state at the given stream position.
            # (https://github.com/matrix-org/synapse/issues/3305)
            last_events, _ = yield self.store.get_recent_events_for_room(
                room_id, end_token=at_token.room_key, limit=1)

            if not last_events:
                raise NotFoundError("Can't find event for token %s" %
                                    (at_token, ))

            visible_events = yield filter_events_for_client(
                self.storage,
                user_id,
                last_events,
                filter_send_to_client=False)

            event = last_events[0]
            if visible_events:
                room_state = yield self.state_store.get_state_for_events(
                    [event.event_id], state_filter=state_filter)
                room_state = room_state[event.event_id]
            else:
                raise AuthError(
                    403,
                    "User %s not allowed to view events in room %s at token %s"
                    % (user_id, room_id, at_token),
                )
        else:
            (
                membership,
                membership_event_id,
            ) = yield self.auth.check_user_in_room_or_world_readable(
                room_id, user_id, allow_departed_users=True)

            if membership == Membership.JOIN:
                state_ids = yield self.store.get_filtered_current_state_ids(
                    room_id, state_filter=state_filter)
                room_state = yield self.store.get_events(state_ids.values())
            elif membership == Membership.LEAVE:
                room_state = yield self.state_store.get_state_for_events(
                    [membership_event_id], state_filter=state_filter)
                room_state = room_state[membership_event_id]

        now = self.clock.time_msec()
        events = yield self._event_serializer.serialize_events(
            room_state.values(),
            now,
            # We don't bother bundling aggregations in when asked for state
            # events, as clients won't use them.
            bundle_aggregations=False,
        )
        return events
Example #18
    def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
                                  membership, is_peeking):
        current_state = yield self.state.get_current_state(room_id=room_id, )

        # TODO: These concurrently
        time_now = self.clock.time_msec()
        state = yield self._event_serializer.serialize_events(
            current_state.values(),
            time_now,
        )

        now_token = yield self.hs.get_event_sources().get_current_token()

        limit = pagin_config.limit if pagin_config else None
        if limit is None:
            limit = 10

        room_members = [
            m for m in current_state.values() if m.type == EventTypes.Member
            and m.content["membership"] == Membership.JOIN
        ]

        presence_handler = self.hs.get_presence_handler()

        @defer.inlineCallbacks
        def get_presence():
            # If presence is disabled, return an empty list
            if not self.hs.config.use_presence:
                defer.returnValue([])

            states = yield presence_handler.get_states(
                [m.user_id for m in room_members],
                as_event=True,
            )

            defer.returnValue(states)

        @defer.inlineCallbacks
        def get_receipts():
            receipts = yield self.store.get_linearized_receipts_for_room(
                room_id,
                to_key=now_token.receipt_key,
            )
            if not receipts:
                receipts = []
            defer.returnValue(receipts)

        presence, receipts, (messages, token) = yield make_deferred_yieldable(
            defer.gatherResults(
                [
                    run_in_background(get_presence),
                    run_in_background(get_receipts),
                    run_in_background(
                        self.store.get_recent_events_for_room,
                        room_id,
                        limit=limit,
                        end_token=now_token.room_key,
                    )
                ],
                consumeErrors=True,
            ).addErrback(unwrapFirstError), )

        messages = yield filter_events_for_client(
            self.store,
            user_id,
            messages,
            is_peeking=is_peeking,
        )

        start_token = now_token.copy_and_replace("room_key", token)
        end_token = now_token

        time_now = self.clock.time_msec()

        ret = {
            "room_id": room_id,
            "messages": {
                "chunk": (yield self._event_serializer.serialize_events(
                    messages,
                    time_now,
                )),
                "start":
                start_token.to_string(),
                "end":
                end_token.to_string(),
            },
            "state": state,
            "presence": presence,
            "receipts": receipts,
        }
        if not is_peeking:
            ret["membership"] = membership

        defer.returnValue(ret)
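A hedged sketch of the concurrency pattern used above to fetch presence, receipts and the recent messages in parallel. run_in_background and make_deferred_yieldable are Synapse's log-context helpers; plain fired Deferreds stand in for them here.

from twisted.internet import defer


@defer.inlineCallbacks
def initial_sync_sketch():
    presence, receipts, (messages, token) = yield defer.gatherResults(
        [
            defer.succeed([]),                               # get_presence()
            defer.succeed([]),                               # get_receipts()
            defer.succeed((["$event1", "$event2"], "s42")),  # recent events + stream token
        ],
        consumeErrors=True,
    )
    defer.returnValue({"messages": messages, "presence": presence,
                       "receipts": receipts, "end": token})


initial_sync_sketch().addCallback(print)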
Example #19
    def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
                                  membership, is_peeking):
        current_state = yield self.state.get_current_state(
            room_id=room_id,
        )

        # TODO: These concurrently
        time_now = self.clock.time_msec()
        state = [
            serialize_event(x, time_now)
            for x in current_state.values()
        ]

        now_token = yield self.hs.get_event_sources().get_current_token()

        limit = pagin_config.limit if pagin_config else None
        if limit is None:
            limit = 10

        room_members = [
            m for m in current_state.values()
            if m.type == EventTypes.Member
            and m.content["membership"] == Membership.JOIN
        ]

        presence_handler = self.hs.get_presence_handler()

        @defer.inlineCallbacks
        def get_presence():
            # If presence is disabled, return an empty list
            if not self.hs.config.use_presence:
                defer.returnValue([])

            states = yield presence_handler.get_states(
                [m.user_id for m in room_members],
                as_event=True,
            )

            defer.returnValue(states)

        @defer.inlineCallbacks
        def get_receipts():
            receipts = yield self.store.get_linearized_receipts_for_room(
                room_id,
                to_key=now_token.receipt_key,
            )
            if not receipts:
                receipts = []
            defer.returnValue(receipts)

        presence, receipts, (messages, token) = yield make_deferred_yieldable(
            defer.gatherResults(
                [
                    run_in_background(get_presence),
                    run_in_background(get_receipts),
                    run_in_background(
                        self.store.get_recent_events_for_room,
                        room_id,
                        limit=limit,
                        end_token=now_token.room_key,
                    )
                ],
                consumeErrors=True,
            ).addErrback(unwrapFirstError),
        )

        messages = yield filter_events_for_client(
            self.store, user_id, messages, is_peeking=is_peeking,
        )

        start_token = now_token.copy_and_replace("room_key", token)
        end_token = now_token

        time_now = self.clock.time_msec()

        ret = {
            "room_id": room_id,
            "messages": {
                "chunk": [serialize_event(m, time_now) for m in messages],
                "start": start_token.to_string(),
                "end": end_token.to_string(),
            },
            "state": state,
            "presence": presence,
            "receipts": receipts,
        }
        if not is_peeking:
            ret["membership"] = membership

        defer.returnValue(ret)
Example #20
    def search(self, user, content, batch=None):
        """Performs a full text search for a user.

        Args:
            user (UserID)
            content (dict): Search parameters
            batch (str): The next_batch parameter. Used for pagination.

        Returns:
            dict to be returned to the client with results of search
        """

        batch_group = None
        batch_group_key = None
        batch_token = None
        if batch:
            try:
                b = decode_base64(batch)
                batch_group, batch_group_key, batch_token = b.split("\n")

                assert batch_group is not None
                assert batch_group_key is not None
                assert batch_token is not None
            except Exception:
                raise SynapseError(400, "Invalid batch")

        try:
            room_cat = content["search_categories"]["room_events"]

            # The actual thing to query in FTS
            search_term = room_cat["search_term"]

            # Which "keys" to search over in FTS query
            keys = room_cat.get("keys", [
                "content.body", "content.name", "content.topic",
            ])

            # Filter to apply to results
            filter_dict = room_cat.get("filter", {})

            # What to order results by (impacts whether pagination can be done)
            order_by = room_cat.get("order_by", "rank")

            # Return the current state of the rooms?
            include_state = room_cat.get("include_state", False)

            # Include context around each event?
            event_context = room_cat.get(
                "event_context", None
            )

            # Group results together? May allow clients to paginate within a
            # group
            group_by = room_cat.get("groupings", {}).get("group_by", {})
            group_keys = [g["key"] for g in group_by]

            if event_context is not None:
                before_limit = int(event_context.get(
                    "before_limit", 5
                ))
                after_limit = int(event_context.get(
                    "after_limit", 5
                ))

                # Return the historic display name and avatar for the senders
                # of the events?
                include_profile = bool(event_context.get("include_profile", False))
        except KeyError:
            raise SynapseError(400, "Invalid search query")

        if order_by not in ("rank", "recent"):
            raise SynapseError(400, "Invalid order by: %r" % (order_by,))

        if set(group_keys) - {"room_id", "sender"}:
            raise SynapseError(
                400,
                "Invalid group by keys: %r" % (set(group_keys) - {"room_id", "sender"},)
            )

        search_filter = Filter(filter_dict)

        # TODO: Search through left rooms too
        rooms = yield self.store.get_rooms_for_user_where_membership_is(
            user.to_string(),
            membership_list=[Membership.JOIN],
            # membership_list=[Membership.JOIN, Membership.LEAVE, Membership.Ban],
        )
        room_ids = set(r.room_id for r in rooms)

        room_ids = search_filter.filter_rooms(room_ids)

        if batch_group == "room_id":
            room_ids.intersection_update({batch_group_key})

        if not room_ids:
            defer.returnValue({
                "search_categories": {
                    "room_events": {
                        "results": [],
                        "count": 0,
                        "highlights": [],
                    }
                }
            })

        rank_map = {}  # event_id -> rank of event
        allowed_events = []
        room_groups = {}  # Holds result of grouping by room, if applicable
        sender_group = {}  # Holds result of grouping by sender, if applicable

        # Holds the next_batch for the entire result set if one of those exists
        global_next_batch = None

        highlights = set()

        count = None

        if order_by == "rank":
            search_result = yield self.store.search_msgs(
                room_ids, search_term, keys
            )

            count = search_result["count"]

            if search_result["highlights"]:
                highlights.update(search_result["highlights"])

            results = search_result["results"]

            results_map = {r["event"].event_id: r for r in results}

            rank_map.update({r["event"].event_id: r["rank"] for r in results})

            filtered_events = search_filter.filter([r["event"] for r in results])

            events = yield filter_events_for_client(
                self.store, user.to_string(), filtered_events
            )

            events.sort(key=lambda e: -rank_map[e.event_id])
            allowed_events = events[:search_filter.limit()]

            for e in allowed_events:
                rm = room_groups.setdefault(e.room_id, {
                    "results": [],
                    "order": rank_map[e.event_id],
                })
                rm["results"].append(e.event_id)

                s = sender_group.setdefault(e.sender, {
                    "results": [],
                    "order": rank_map[e.event_id],
                })
                s["results"].append(e.event_id)

        elif order_by == "recent":
            room_events = []
            i = 0

            pagination_token = batch_token

            # We keep looping and we keep filtering until we reach the limit
            # or we run out of things.
            # But only go around 5 times since otherwise synapse will be sad.
            while len(room_events) < search_filter.limit() and i < 5:
                i += 1
                search_result = yield self.store.search_rooms(
                    room_ids, search_term, keys, search_filter.limit() * 2,
                    pagination_token=pagination_token,
                )

                if search_result["highlights"]:
                    highlights.update(search_result["highlights"])

                count = search_result["count"]

                results = search_result["results"]

                results_map = {r["event"].event_id: r for r in results}

                rank_map.update({r["event"].event_id: r["rank"] for r in results})

                filtered_events = search_filter.filter([
                    r["event"] for r in results
                ])

                events = yield filter_events_for_client(
                    self.store, user.to_string(), filtered_events
                )

                room_events.extend(events)
                room_events = room_events[:search_filter.limit()]

                if len(results) < search_filter.limit() * 2:
                    pagination_token = None
                    break
                else:
                    pagination_token = results[-1]["pagination_token"]

            for event in room_events:
                group = room_groups.setdefault(event.room_id, {
                    "results": [],
                })
                group["results"].append(event.event_id)

            if room_events and len(room_events) >= search_filter.limit():
                last_event_id = room_events[-1].event_id
                pagination_token = results_map[last_event_id]["pagination_token"]

                # We want to respect the given batch group and group keys so
                # that if people blindly use the top level `next_batch` token
                # it returns more from the same group (if applicable) rather
                # than reverting to searching all results again.
                if batch_group and batch_group_key:
                    global_next_batch = encode_base64("%s\n%s\n%s" % (
                        batch_group, batch_group_key, pagination_token
                    ))
                else:
                    global_next_batch = encode_base64("%s\n%s\n%s" % (
                        "all", "", pagination_token
                    ))

                for room_id, group in room_groups.items():
                    group["next_batch"] = encode_base64("%s\n%s\n%s" % (
                        "room_id", room_id, pagination_token
                    ))

            allowed_events.extend(room_events)

        else:
            # We should never get here due to the guard earlier.
            raise NotImplementedError()

        # If client has asked for "context" for each event (i.e. some surrounding
        # events and state), fetch that
        if event_context is not None:
            now_token = yield self.hs.get_event_sources().get_current_token()

            contexts = {}
            for event in allowed_events:
                res = yield self.store.get_events_around(
                    event.room_id, event.event_id, before_limit, after_limit
                )

                res["events_before"] = yield filter_events_for_client(
                    self.store, user.to_string(), res["events_before"]
                )

                res["events_after"] = yield filter_events_for_client(
                    self.store, user.to_string(), res["events_after"]
                )

                res["start"] = now_token.copy_and_replace(
                    "room_key", res["start"]
                ).to_string()

                res["end"] = now_token.copy_and_replace(
                    "room_key", res["end"]
                ).to_string()

                if include_profile:
                    senders = set(
                        ev.sender
                        for ev in itertools.chain(
                            res["events_before"], [event], res["events_after"]
                        )
                    )

                    if res["events_after"]:
                        last_event_id = res["events_after"][-1].event_id
                    else:
                        last_event_id = event.event_id

                    state = yield self.store.get_state_for_event(
                        last_event_id,
                        types=[(EventTypes.Member, sender) for sender in senders]
                    )

                    res["profile_info"] = {
                        s.state_key: {
                            "displayname": s.content.get("displayname", None),
                            "avatar_url": s.content.get("avatar_url", None),
                        }
                        for s in state.values()
                        if s.type == EventTypes.Member and s.state_key in senders
                    }

                contexts[event.event_id] = res
        else:
            contexts = {}

        # TODO: Add a limit

        time_now = self.clock.time_msec()

        for context in contexts.values():
            context["events_before"] = [
                serialize_event(e, time_now)
                for e in context["events_before"]
            ]
            context["events_after"] = [
                serialize_event(e, time_now)
                for e in context["events_after"]
            ]

        state_results = {}
        if include_state:
            rooms = set(e.room_id for e in allowed_events)
            for room_id in rooms:
                state = yield self.state_handler.get_current_state(room_id)
                state_results[room_id] = list(state.values())

            state_results.values()

        # We're now about to serialize the events. We should not make any
        # blocking calls after this. Otherwise the 'age' will be wrong

        results = [
            {
                "rank": rank_map[e.event_id],
                "result": serialize_event(e, time_now),
                "context": contexts.get(e.event_id, {}),
            }
            for e in allowed_events
        ]

        rooms_cat_res = {
            "results": results,
            "count": count,
            "highlights": list(highlights),
        }

        if state_results:
            rooms_cat_res["state"] = {
                room_id: [serialize_event(e, time_now) for e in state]
                for room_id, state in state_results.items()
            }

        if room_groups and "room_id" in group_keys:
            rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups

        if sender_group and "sender" in group_keys:
            rooms_cat_res.setdefault("groups", {})["sender"] = sender_group

        if global_next_batch:
            rooms_cat_res["next_batch"] = global_next_batch

        defer.returnValue({
            "search_categories": {
                "room_events": rooms_cat_res
            }
        })
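A hedged sketch of the next_batch token scheme used by search() above: three newline-joined fields (group, group key, pagination token), base64 encoded. The helpers below are simple stand-ins for the encode_base64 / decode_base64 the handler imports.

from base64 import b64decode, b64encode


def encode_base64(s):
    # stand-in; Synapse uses unpadded-base64 helpers operating on bytes
    return b64encode(s.encode("utf-8")).decode("ascii")


def decode_base64(s):
    return b64decode(s).decode("utf-8")


next_batch = encode_base64("%s\n%s\n%s" % ("room_id", "!abc:example.org", "pt-42"))
batch_group, batch_group_key, batch_token = decode_base64(next_batch).split("\n")
print(batch_group, batch_group_key, batch_token)  # room_id !abc:example.org pt-42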
Example #21
    def search(self, user, content, batch=None):
        """Performs a full text search for a user.

        Args:
            user (UserID)
            content (dict): Search parameters
            batch (str): The next_batch parameter. Used for pagination.

        Returns:
            dict to be returned to the client with results of search
        """

        batch_group = None
        batch_group_key = None
        batch_token = None
        if batch:
            try:
                b = decode_base64(batch)
                batch_group, batch_group_key, batch_token = b.split("\n")

                assert batch_group is not None
                assert batch_group_key is not None
                assert batch_token is not None
            except Exception:
                raise SynapseError(400, "Invalid batch")

        try:
            room_cat = content["search_categories"]["room_events"]

            # The actual thing to query in FTS
            search_term = room_cat["search_term"]

            # Which "keys" to search over in FTS query
            keys = room_cat.get("keys", [
                "content.body", "content.name", "content.topic",
            ])

            # Filter to apply to results
            filter_dict = room_cat.get("filter", {})

            # What to order results by (impacts whether pagination can be done)
            order_by = room_cat.get("order_by", "rank")

            # Return the current state of the rooms?
            include_state = room_cat.get("include_state", False)

            # Include context around each event?
            event_context = room_cat.get(
                "event_context", None
            )

            # Group results together? May allow clients to paginate within a
            # group
            group_by = room_cat.get("groupings", {}).get("group_by", {})
            group_keys = [g["key"] for g in group_by]

            if event_context is not None:
                before_limit = int(event_context.get(
                    "before_limit", 5
                ))
                after_limit = int(event_context.get(
                    "after_limit", 5
                ))

                # Return the historic display name and avatar for the senders
                # of the events?
                include_profile = bool(event_context.get("include_profile", False))
        except KeyError:
            raise SynapseError(400, "Invalid search query")

        if order_by not in ("rank", "recent"):
            raise SynapseError(400, "Invalid order by: %r" % (order_by,))

        if set(group_keys) - {"room_id", "sender"}:
            raise SynapseError(
                400,
                "Invalid group by keys: %r" % (set(group_keys) - {"room_id", "sender"},)
            )

        search_filter = Filter(filter_dict)

        # TODO: Search through left rooms too
        rooms = yield self.store.get_rooms_for_user_where_membership_is(
            user.to_string(),
            membership_list=[Membership.JOIN],
            # membership_list=[Membership.JOIN, Membership.LEAVE, Membership.Ban],
        )
        room_ids = set(r.room_id for r in rooms)

        room_ids = search_filter.filter_rooms(room_ids)

        if batch_group == "room_id":
            room_ids.intersection_update({batch_group_key})

        if not room_ids:
            defer.returnValue({
                "search_categories": {
                    "room_events": {
                        "results": [],
                        "count": 0,
                        "highlights": [],
                    }
                }
            })

        rank_map = {}  # event_id -> rank of event
        allowed_events = []
        room_groups = {}  # Holds result of grouping by room, if applicable
        sender_group = {}  # Holds result of grouping by sender, if applicable

        # Holds the next_batch for the entire result set if one of those exists
        global_next_batch = None

        highlights = set()

        count = None

        if order_by == "rank":
            search_result = yield self.store.search_msgs(
                room_ids, search_term, keys
            )

            count = search_result["count"]

            if search_result["highlights"]:
                highlights.update(search_result["highlights"])

            results = search_result["results"]

            results_map = {r["event"].event_id: r for r in results}

            rank_map.update({r["event"].event_id: r["rank"] for r in results})

            filtered_events = search_filter.filter([r["event"] for r in results])

            events = yield filter_events_for_client(
                self.store, user.to_string(), filtered_events
            )

            events.sort(key=lambda e: -rank_map[e.event_id])
            allowed_events = events[:search_filter.limit()]

            for e in allowed_events:
                rm = room_groups.setdefault(e.room_id, {
                    "results": [],
                    "order": rank_map[e.event_id],
                })
                rm["results"].append(e.event_id)

                s = sender_group.setdefault(e.sender, {
                    "results": [],
                    "order": rank_map[e.event_id],
                })
                s["results"].append(e.event_id)

        elif order_by == "recent":
            room_events = []
            i = 0

            pagination_token = batch_token

            # We keep looping and we keep filtering until we reach the limit
            # or we run out of things.
            # But only go around 5 times since otherwise synapse will be sad.
            while len(room_events) < search_filter.limit() and i < 5:
                i += 1
                search_result = yield self.store.search_rooms(
                    room_ids, search_term, keys, search_filter.limit() * 2,
                    pagination_token=pagination_token,
                )

                if search_result["highlights"]:
                    highlights.update(search_result["highlights"])

                count = search_result["count"]

                results = search_result["results"]

                results_map = {r["event"].event_id: r for r in results}

                rank_map.update({r["event"].event_id: r["rank"] for r in results})

                filtered_events = search_filter.filter([
                    r["event"] for r in results
                ])

                events = yield filter_events_for_client(
                    self.store, user.to_string(), filtered_events
                )

                room_events.extend(events)
                room_events = room_events[:search_filter.limit()]

                if len(results) < search_filter.limit() * 2:
                    pagination_token = None
                    break
                else:
                    pagination_token = results[-1]["pagination_token"]

            for event in room_events:
                group = room_groups.setdefault(event.room_id, {
                    "results": [],
                })
                group["results"].append(event.event_id)

            if room_events and len(room_events) >= search_filter.limit():
                last_event_id = room_events[-1].event_id
                pagination_token = results_map[last_event_id]["pagination_token"]

                # We want to respect the given batch group and group keys so
                # that if people blindly use the top level `next_batch` token
                # it returns more from the same group (if applicable) rather
                # than reverting to searching all results again.
                if batch_group and batch_group_key:
                    global_next_batch = encode_base64("%s\n%s\n%s" % (
                        batch_group, batch_group_key, pagination_token
                    ))
                else:
                    global_next_batch = encode_base64("%s\n%s\n%s" % (
                        "all", "", pagination_token
                    ))

                for room_id, group in room_groups.items():
                    group["next_batch"] = encode_base64("%s\n%s\n%s" % (
                        "room_id", room_id, pagination_token
                    ))

            allowed_events.extend(room_events)

        else:
            # We should never get here due to the guard earlier.
            raise NotImplementedError()

        # If client has asked for "context" for each event (i.e. some surrounding
        # events and state), fetch that
        if event_context is not None:
            now_token = yield self.hs.get_event_sources().get_current_token()

            contexts = {}
            for event in allowed_events:
                res = yield self.store.get_events_around(
                    event.room_id, event.event_id, before_limit, after_limit
                )

                res["events_before"] = yield filter_events_for_client(
                    self.store, user.to_string(), res["events_before"]
                )

                res["events_after"] = yield filter_events_for_client(
                    self.store, user.to_string(), res["events_after"]
                )

                res["start"] = now_token.copy_and_replace(
                    "room_key", res["start"]
                ).to_string()

                res["end"] = now_token.copy_and_replace(
                    "room_key", res["end"]
                ).to_string()

                if include_profile:
                    senders = set(
                        ev.sender
                        for ev in itertools.chain(
                            res["events_before"], [event], res["events_after"]
                        )
                    )

                    if res["events_after"]:
                        last_event_id = res["events_after"][-1].event_id
                    else:
                        last_event_id = event.event_id

                    state = yield self.store.get_state_for_event(
                        last_event_id,
                        types=[(EventTypes.Member, sender) for sender in senders]
                    )

                    res["profile_info"] = {
                        s.state_key: {
                            "displayname": s.content.get("displayname", None),
                            "avatar_url": s.content.get("avatar_url", None),
                        }
                        for s in state.values()
                        if s.type == EventTypes.Member and s.state_key in senders
                    }

                contexts[event.event_id] = res
        else:
            contexts = {}

        # TODO: Add a limit

        time_now = self.clock.time_msec()

        for context in contexts.values():
            context["events_before"] = [
                serialize_event(e, time_now)
                for e in context["events_before"]
            ]
            context["events_after"] = [
                serialize_event(e, time_now)
                for e in context["events_after"]
            ]

        state_results = {}
        if include_state:
            rooms = set(e.room_id for e in allowed_events)
            for room_id in rooms:
                state = yield self.state_handler.get_current_state(room_id)
                state_results[room_id] = state.values()

        # We're now about to serialize the events. We should not make any
        # blocking calls after this. Otherwise the 'age' will be wrong

        results = [
            {
                "rank": rank_map[e.event_id],
                "result": serialize_event(e, time_now),
                "context": contexts.get(e.event_id, {}),
            }
            for e in allowed_events
        ]

        rooms_cat_res = {
            "results": results,
            "count": count,
            "highlights": list(highlights),
        }

        if state_results:
            rooms_cat_res["state"] = {
                room_id: [serialize_event(e, time_now) for e in state]
                for room_id, state in state_results.items()
            }

        if room_groups and "room_id" in group_keys:
            rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups

        if sender_group and "sender" in group_keys:
            rooms_cat_res.setdefault("groups", {})["sender"] = sender_group

        if global_next_batch:
            rooms_cat_res["next_batch"] = global_next_batch

        defer.returnValue({
            "search_categories": {
                "room_events": rooms_cat_res
            }
        })
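The `next_batch` tokens built above pack a batch group, a group key and the backend pagination token into one opaque string separated by newlines. A minimal standalone sketch of that round trip, assuming plain standard-library base64 in place of Synapse's own `encode_base64` helper (a hypothetical illustration, not the server's actual token code):

import base64


def encode_batch_token(batch_group, batch_group_key, pagination_token):
    # Pack the three fields with newline separators, mirroring the handler above.
    raw = "%s\n%s\n%s" % (batch_group, batch_group_key, pagination_token)
    return base64.b64encode(raw.encode("utf-8")).decode("ascii")


def decode_batch_token(next_batch):
    # Reverse of the above: base64-decode, then split back into the three fields.
    raw = base64.b64decode(next_batch).decode("utf-8")
    batch_group, batch_group_key, pagination_token = raw.split("\n", 2)
    return batch_group, batch_group_key, pagination_token


# e.g. decode_batch_token(encode_batch_token("room_id", "!room:example.org", "t12-34"))
# -> ("room_id", "!room:example.org", "t12-34")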
Beispiel #22
0
    def get_state_events(
        self, user_id, room_id, state_filter=StateFilter.all(),
        at_token=None, is_guest=False,
    ):
        """Retrieve all state events for a given room. If the user is
        joined to the room then return the current state. If the user has
        left the room return the state events from when they left. If an explicit
        'at' parameter is passed, return the state events as of that event, if
        visible.

        Args:
            user_id(str): The user requesting state events.
            room_id(str): The room ID to get all state events from.
            state_filter (StateFilter): The state filter used to fetch state
                from the database.
            at_token(StreamToken|None): the stream token of the point at which we are
                requesting the state. If the user is not allowed to view the state as of that
                stream token, we raise a 403 SynapseError. If None, returns the current
                state based on the current_state_events table.
            is_guest(bool): whether this user is a guest
        Returns:
            A list of dicts representing state events. [{}, {}, {}]
        Raises:
            NotFoundError (404) if the at token does not yield an event

            AuthError (403) if the user doesn't have permission to view
            members of this room.
        """
        if at_token:
            # FIXME this claims to get the state at a stream position, but
            # get_recent_events_for_room operates by topo ordering. This therefore
            # does not reliably give you the state at the given stream position.
            # (https://github.com/matrix-org/synapse/issues/3305)
            last_events, _ = yield self.store.get_recent_events_for_room(
                room_id, end_token=at_token.room_key, limit=1,
            )

            if not last_events:
                raise NotFoundError("Can't find event for token %s" % (at_token, ))

            visible_events = yield filter_events_for_client(
                self.store, user_id, last_events,
            )

            event = last_events[0]
            if visible_events:
                room_state = yield self.store.get_state_for_events(
                    [event.event_id], state_filter=state_filter,
                )
                room_state = room_state[event.event_id]
            else:
                raise AuthError(
                    403,
                    "User %s not allowed to view events in room %s at token %s" % (
                        user_id, room_id, at_token,
                    )
                )
        else:
            membership, membership_event_id = (
                yield self.auth.check_in_room_or_world_readable(
                    room_id, user_id,
                )
            )

            if membership == Membership.JOIN:
                state_ids = yield self.store.get_filtered_current_state_ids(
                    room_id, state_filter=state_filter,
                )
                room_state = yield self.store.get_events(state_ids.values())
            elif membership == Membership.LEAVE:
                room_state = yield self.store.get_state_for_events(
                    [membership_event_id], state_filter=state_filter,
                )
                room_state = room_state[membership_event_id]

        now = self.clock.time_msec()
        events = yield self._event_serializer.serialize_events(
            room_state.values(), now,
        )
        defer.returnValue(events)
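The `at_token` branch above follows a common gate: fetch the most recent event at or before the token, run it through `filter_events_for_client`, and refuse with a 403 if nothing survives. A framework-free sketch of that shape, with a hypothetical `is_event_visible` predicate standing in for the real filter:

class AuthError(Exception):
    """Stand-in for Synapse's AuthError: carries an HTTP status code."""
    def __init__(self, code, msg):
        super(AuthError, self).__init__(msg)
        self.code = code


def check_state_visibility(user_id, last_events, is_event_visible):
    # is_event_visible(user_id, event) plays the role of filter_events_for_client:
    # it decides whether this user may see a given event.
    visible = [ev for ev in last_events if is_event_visible(user_id, ev)]
    if not visible:
        raise AuthError(403, "User %s not allowed to view state at this point" % (user_id,))
    # The real handler would now load the room state at last_events[0].event_id.
    return last_events[0]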
Beispiel #23
0
    def _load_filtered_recents(self, room_id, sync_config, now_token,
                               since_token=None, recents=None, newly_joined_room=False):
        """
        Returns:
            a Deferred TimelineBatch
        """
        with Measure(self.clock, "load_filtered_recents"):
            timeline_limit = sync_config.filter_collection.timeline_limit()

            if recents is None or newly_joined_room or timeline_limit < len(recents):
                limited = True
            else:
                limited = False

            if recents:
                recents = sync_config.filter_collection.filter_room_timeline(recents)
                recents = yield filter_events_for_client(
                    self.store,
                    sync_config.user.to_string(),
                    recents,
                )
            else:
                recents = []

            if not limited:
                defer.returnValue(TimelineBatch(
                    events=recents,
                    prev_batch=now_token,
                    limited=False
                ))

            filtering_factor = 2
            load_limit = max(timeline_limit * filtering_factor, 10)
            max_repeat = 5  # Only try a few times per room, otherwise we could loop for a long time
            room_key = now_token.room_key
            end_key = room_key

            since_key = None
            if since_token and not newly_joined_room:
                since_key = since_token.room_key

            while limited and len(recents) < timeline_limit and max_repeat:
                events, end_key = yield self.store.get_room_events_stream_for_room(
                    room_id,
                    limit=load_limit + 1,
                    from_key=since_key,
                    to_key=end_key,
                )
                loaded_recents = sync_config.filter_collection.filter_room_timeline(
                    events
                )
                loaded_recents = yield filter_events_for_client(
                    self.store,
                    sync_config.user.to_string(),
                    loaded_recents,
                )
                loaded_recents.extend(recents)
                recents = loaded_recents

                if len(events) <= load_limit:
                    limited = False
                    break
                max_repeat -= 1

            if len(recents) > timeline_limit:
                limited = True
                recents = recents[-timeline_limit:]
                room_key = recents[0].internal_metadata.before

            prev_batch_token = now_token.copy_and_replace(
                "room_key", room_key
            )

        defer.returnValue(TimelineBatch(
            events=recents,
            prev_batch=prev_batch_token,
            limited=limited or newly_joined_room
        ))
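Because `filter_room_timeline` and `filter_events_for_client` can discard events, the loop above over-fetches by a `filtering_factor` and bounds the number of retries with `max_repeat`. A framework-free sketch of that shape, assuming a hypothetical `fetch_page(limit)` callable that keeps its own cursor and returns `(events, exhausted)`, plus a `keep(event)` predicate standing in for the filtering:

def load_filtered(fetch_page, keep, timeline_limit, filtering_factor=2, max_repeat=5):
    # Over-fetch so that, even after filtering, we usually still reach the limit.
    load_limit = max(timeline_limit * filtering_factor, 10)
    recents = []
    limited = True
    while limited and len(recents) < timeline_limit and max_repeat:
        # fetch_page is assumed to return the next (older) page on each call.
        events, exhausted = fetch_page(load_limit + 1)
        # Newly loaded (older) events go in front of what we already have.
        recents = [ev for ev in events if keep(ev)] + recents
        if exhausted:
            # The backing store has nothing further to give us.
            limited = False
            break
        max_repeat -= 1
    if len(recents) > timeline_limit:
        # We got more than asked for: trim to the newest timeline_limit events.
        limited = True
        recents = recents[-timeline_limit:]
    return recents, limited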
Beispiel #24
0
    def get_messages(
        self,
        requester,
        room_id=None,
        pagin_config=None,
        as_client_event=True,
        event_filter=None,
    ):
        """Get messages in a room.

        Args:
            requester (Requester): The user requesting messages.
            room_id (str): The room they want messages from.
            pagin_config (synapse.api.streams.PaginationConfig): The pagination
                config rules to apply, if any.
            as_client_event (bool): True to get events in client-server format.
            event_filter (Filter): Filter to apply to results or None
        Returns:
            dict: Pagination API results
        """
        user_id = requester.user.to_string()

        if pagin_config.from_token:
            room_token = pagin_config.from_token.room_key
        else:
            pagin_config.from_token = (
                yield self.hs.get_event_sources().get_current_token_for_pagination()
            )
            room_token = pagin_config.from_token.room_key

        room_token = RoomStreamToken.parse(room_token)

        pagin_config.from_token = pagin_config.from_token.copy_and_replace(
            "room_key", str(room_token)
        )

        source_config = pagin_config.get_source_config("room")

        with (yield self.pagination_lock.read(room_id)):
            (
                membership,
                member_event_id,
            ) = yield self.auth.check_in_room_or_world_readable(room_id, user_id)

            if source_config.direction == "b":
                # if we're going backwards, we might need to backfill. This
                # requires that we have a topo token.
                if room_token.topological:
                    max_topo = room_token.topological
                else:
                    max_topo = yield self.store.get_max_topological_token(
                        room_id, room_token.stream
                    )

                if membership == Membership.LEAVE:
                    # If they have left the room then clamp the token to be before
                    # they left the room, to save the effort of loading from the
                    # database.
                    leave_token = yield self.store.get_topological_token_for_event(
                        member_event_id
                    )
                    leave_token = RoomStreamToken.parse(leave_token)
                    if leave_token.topological < max_topo:
                        source_config.from_key = str(leave_token)

                yield self.hs.get_handlers().federation_handler.maybe_backfill(
                    room_id, max_topo
                )

            events, next_key = yield self.store.paginate_room_events(
                room_id=room_id,
                from_key=source_config.from_key,
                to_key=source_config.to_key,
                direction=source_config.direction,
                limit=source_config.limit,
                event_filter=event_filter,
            )

            next_token = pagin_config.from_token.copy_and_replace("room_key", next_key)

        if events:
            if event_filter:
                events = event_filter.filter(events)

            events = yield filter_events_for_client(
                self.storage, user_id, events, is_peeking=(member_event_id is None)
            )

        if not events:
            return {
                "chunk": [],
                "start": pagin_config.from_token.to_string(),
                "end": next_token.to_string(),
            }

        state = None
        if event_filter and event_filter.lazy_load_members() and len(events) > 0:
            # TODO: remove redundant members

            # FIXME: we also care about invite targets etc.
            state_filter = StateFilter.from_types(
                (EventTypes.Member, event.sender) for event in events
            )

            state_ids = yield self.state_store.get_state_ids_for_event(
                events[0].event_id, state_filter=state_filter
            )

            if state_ids:
                state = yield self.store.get_events(list(state_ids.values()))
                state = state.values()

        time_now = self.clock.time_msec()

        chunk = {
            "chunk": (
                yield self._event_serializer.serialize_events(
                    events, time_now, as_client_event=as_client_event
                )
            ),
            "start": pagin_config.from_token.to_string(),
            "end": next_token.to_string(),
        }

        if state:
            chunk["state"] = yield self._event_serializer.serialize_events(
                state, time_now, as_client_event=as_client_event
            )

        return chunk
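When paginating backwards on behalf of a user who has left the room, the handler clamps the starting point to their leave event, so it never pages through history they could not see anyway. A toy sketch of just that clamping decision, using plain integers in place of `RoomStreamToken` topological orderings (the real token type is Synapse-internal):

def clamp_backwards_start(requested_topo, leave_topo, has_left):
    # Going backwards: never start later (topologically) than the user's leave
    # event if they have left the room; otherwise honour the requested position.
    if has_left and leave_topo < requested_topo:
        return leave_topo
    return requested_topo


# e.g. clamp_backwards_start(120, 80, True)  -> 80  (clamped to the leave point)
#      clamp_backwards_start(120, 80, False) -> 120 (still joined, no clamping)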
Beispiel #25
0
    def get_messages(self, requester, room_id=None, pagin_config=None,
                     as_client_event=True, event_filter=None):
        """Get messages in a room.

        Args:
            requester (Requester): The user requesting messages.
            room_id (str): The room they want messages from.
            pagin_config (synapse.api.streams.PaginationConfig): The pagination
                config rules to apply, if any.
            as_client_event (bool): True to get events in client-server format.
            event_filter (Filter): Filter to apply to results or None
        Returns:
            dict: Pagination API results
        """
        user_id = requester.user.to_string()

        if pagin_config.from_token:
            room_token = pagin_config.from_token.room_key
        else:
            pagin_config.from_token = (
                yield self.hs.get_event_sources().get_current_token_for_room(
                    room_id=room_id
                )
            )
            room_token = pagin_config.from_token.room_key

        room_token = RoomStreamToken.parse(room_token)

        pagin_config.from_token = pagin_config.from_token.copy_and_replace(
            "room_key", str(room_token)
        )

        source_config = pagin_config.get_source_config("room")

        with (yield self.pagination_lock.read(room_id)):
            membership, member_event_id = yield self.auth.check_in_room_or_world_readable(
                room_id, user_id
            )

            if source_config.direction == 'b':
                # if we're going backwards, we might need to backfill. This
                # requires that we have a topo token.
                if room_token.topological:
                    max_topo = room_token.topological
                else:
                    max_topo = yield self.store.get_max_topological_token(
                        room_id, room_token.stream
                    )

                if membership == Membership.LEAVE:
                    # If they have left the room then clamp the token to be before
                    # they left the room, to save the effort of loading from the
                    # database.
                    leave_token = yield self.store.get_topological_token_for_event(
                        member_event_id
                    )
                    leave_token = RoomStreamToken.parse(leave_token)
                    if leave_token.topological < max_topo:
                        source_config.from_key = str(leave_token)

                yield self.hs.get_handlers().federation_handler.maybe_backfill(
                    room_id, max_topo
                )

            events, next_key = yield self.store.paginate_room_events(
                room_id=room_id,
                from_key=source_config.from_key,
                to_key=source_config.to_key,
                direction=source_config.direction,
                limit=source_config.limit,
                event_filter=event_filter,
            )

            next_token = pagin_config.from_token.copy_and_replace(
                "room_key", next_key
            )

        if events:
            if event_filter:
                events = event_filter.filter(events)

            events = yield filter_events_for_client(
                self.store,
                user_id,
                events,
                is_peeking=(member_event_id is None),
            )

        if not events:
            defer.returnValue({
                "chunk": [],
                "start": pagin_config.from_token.to_string(),
                "end": next_token.to_string(),
            })

        state = None
        if event_filter and event_filter.lazy_load_members():
            # TODO: remove redundant members

            # FIXME: we also care about invite targets etc.
            state_filter = StateFilter.from_types(
                (EventTypes.Member, event.sender)
                for event in events
            )

            state_ids = yield self.store.get_state_ids_for_event(
                events[0].event_id, state_filter=state_filter,
            )

            if state_ids:
                state = yield self.store.get_events(list(state_ids.values()))
                state = state.values()

        time_now = self.clock.time_msec()

        chunk = {
            "chunk": [
                serialize_event(e, time_now, as_client_event)
                for e in events
            ],
            "start": pagin_config.from_token.to_string(),
            "end": next_token.to_string(),
        }

        if state:
            chunk["state"] = [
                serialize_event(e, time_now, as_client_event)
                for e in state
            ]

        defer.returnValue(chunk)
Beispiel #26
0
    def get_state_events(
        self,
        user_id,
        room_id,
        types=None,
        filtered_types=None,
        at_token=None,
        is_guest=False,
    ):
        """Retrieve all state events for a given room. If the user is
        joined to the room then return the current state. If the user has
        left the room return the state events from when they left. If an explicit
        'at' parameter is passed, return the state events as of that event, if
        visible.

        Args:
            user_id(str): The user requesting state events.
            room_id(str): The room ID to get all state events from.
            types(list[(str, str|None)]|None): List of (type, state_key) tuples
                which are used to filter the state fetched. If `state_key` is None,
                all events of the given type are returned.
                `types` itself may be None, in which case no filtering is applied.
            filtered_types(list[str]|None): Only apply filtering via `types` to this
                list of event types.  Other types of events are returned unfiltered.
                If None, `types` filtering is applied to all events.
            at_token(StreamToken|None): the stream token of the point at which we are
                requesting the state. If the user is not allowed to view the state as of that
                stream token, we raise a 403 SynapseError. If None, returns the current
                state based on the current_state_events table.
            is_guest(bool): whether this user is a guest
        Returns:
            A list of dicts representing state events. [{}, {}, {}]
        Raises:
            NotFoundError (404) if the at token does not yield an event

            AuthError (403) if the user doesn't have permission to view
            members of this room.
        """
        if at_token:
            # FIXME this claims to get the state at a stream position, but
            # get_recent_events_for_room operates by topo ordering. This therefore
            # does not reliably give you the state at the given stream position.
            # (https://github.com/matrix-org/synapse/issues/3305)
            last_events, _ = yield self.store.get_recent_events_for_room(
                room_id,
                end_token=at_token.room_key,
                limit=1,
            )

            if not last_events:
                raise NotFoundError("Can't find event for token %s" %
                                    (at_token, ))

            visible_events = yield filter_events_for_client(
                self.store,
                user_id,
                last_events,
            )

            event = last_events[0]
            if visible_events:
                room_state = yield self.store.get_state_for_events(
                    [event.event_id],
                    types,
                    filtered_types=filtered_types,
                )
                room_state = room_state[event.event_id]
            else:
                raise AuthError(
                    403,
                    "User %s not allowed to view events in room %s at token %s"
                    % (
                        user_id,
                        room_id,
                        at_token,
                    ))
        else:
            membership, membership_event_id = (
                yield self.auth.check_in_room_or_world_readable(
                    room_id,
                    user_id,
                ))

            if membership == Membership.JOIN:
                state_ids = yield self.store.get_filtered_current_state_ids(
                    room_id,
                    types,
                    filtered_types=filtered_types,
                )
                room_state = yield self.store.get_events(state_ids.values())
            elif membership == Membership.LEAVE:
                room_state = yield self.store.get_state_for_events(
                    [membership_event_id],
                    types,
                    filtered_types=filtered_types,
                )
                room_state = room_state[membership_event_id]

        now = self.clock.time_msec()
        defer.returnValue(
            [serialize_event(c, now) for c in room_state.values()])
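One way to read the `types`/`filtered_types` contract described in the docstring: `types` is a list of `(event_type, state_key)` pairs where a `None` state key matches any key, and the filter is only applied to event types named in `filtered_types`; everything else passes through untouched. A small standalone sketch of those semantics over a dict keyed by `(type, state_key)` (an illustration of the documented behaviour, not Synapse's storage code):

def filter_state(state_dict, types=None, filtered_types=None):
    # state_dict maps (event_type, state_key) -> event.
    if types is None:
        # No filtering requested at all.
        return dict(state_dict)

    wanted = set(types)
    result = {}
    for (ev_type, state_key), event in state_dict.items():
        if filtered_types is not None and ev_type not in filtered_types:
            # This event type is exempt from filtering and is always returned.
            result[(ev_type, state_key)] = event
        elif (ev_type, state_key) in wanted or (ev_type, None) in wanted:
            # Either the exact (type, key) pair was asked for, or the type was
            # asked for with a None state key (which matches any key).
            result[(ev_type, state_key)] = event
    return result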
Beispiel #27
0
    def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
                                  membership, is_peeking):
        current_state = yield self.state.get_current_state(
            room_id=room_id,
        )

        # TODO: These concurrently
        time_now = self.clock.time_msec()
        state = [
            serialize_event(x, time_now)
            for x in current_state.values()
        ]

        now_token = yield self.hs.get_event_sources().get_current_token()

        limit = pagin_config.limit if pagin_config else None
        if limit is None:
            limit = 10

        room_members = [
            m for m in current_state.values()
            if m.type == EventTypes.Member
            and m.content["membership"] == Membership.JOIN
        ]

        presence_handler = self.hs.get_presence_handler()

        @defer.inlineCallbacks
        def get_presence():
            states = yield presence_handler.get_states(
                [m.user_id for m in room_members],
                as_event=True,
            )

            defer.returnValue(states)

        @defer.inlineCallbacks
        def get_receipts():
            receipts_handler = self.hs.get_handlers().receipts_handler
            receipts = yield receipts_handler.get_receipts_for_room(
                room_id,
                now_token.receipt_key
            )
            defer.returnValue(receipts)

        presence, receipts, (messages, token) = yield defer.gatherResults(
            [
                preserve_fn(get_presence)(),
                preserve_fn(get_receipts)(),
                preserve_fn(self.store.get_recent_events_for_room)(
                    room_id,
                    limit=limit,
                    end_token=now_token.room_key,
                )
            ],
            consumeErrors=True,
        ).addErrback(unwrapFirstError)

        messages = yield filter_events_for_client(
            self.store, user_id, messages, is_peeking=is_peeking,
        )

        start_token = now_token.copy_and_replace("room_key", token[0])
        end_token = now_token.copy_and_replace("room_key", token[1])

        time_now = self.clock.time_msec()

        ret = {
            "room_id": room_id,
            "messages": {
                "chunk": [serialize_event(m, time_now) for m in messages],
                "start": start_token.to_string(),
                "end": end_token.to_string(),
            },
            "state": state,
            "presence": presence,
            "receipts": receipts,
        }
        if not is_peeking:
            ret["membership"] = membership

        defer.returnValue(ret)
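The initial sync above fans out the presence, receipt and recent-message lookups concurrently with `defer.gatherResults` and then unwraps the first failure. A minimal Twisted-only sketch of that pattern, without Synapse's `preserve_fn`/`unwrapFirstError` logcontext helpers (the three arguments are hypothetical nullary callables returning Deferreds):

from twisted.internet import defer


@defer.inlineCallbacks
def gather_room_snapshot(get_presence, get_receipts, get_messages):
    # Kick all three lookups off concurrently and wait for them together.
    presence, receipts, messages = yield defer.gatherResults(
        [get_presence(), get_receipts(), get_messages()],
        consumeErrors=True,
    )
    defer.returnValue({
        "presence": presence,
        "receipts": receipts,
        "messages": messages,
    })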
Beispiel #28
0
    def export_user_data(self, user_id, writer):
        """Write all data we have on the user to the given writer.

        Args:
            user_id (str)
            writer (ExfiltrationWriter)

        Returns:
            defer.Deferred: Resolves when all data for a user has been written.
            The returned value is that returned by `writer.finished()`.
        """
        # Get all rooms the user is in or has been in
        rooms = yield self.store.get_rooms_for_user_where_membership_is(
            user_id,
            membership_list=(
                Membership.JOIN,
                Membership.LEAVE,
                Membership.BAN,
                Membership.INVITE,
            ),
        )

        # We only try and fetch events for rooms the user has been in. If
        # they've been e.g. invited to a room without joining then we handle
        # those separately.
        rooms_user_has_been_in = yield self.store.get_rooms_user_has_been_in(
            user_id)

        for index, room in enumerate(rooms):
            room_id = room.room_id

            logger.info("[%s] Handling room %s, %d/%d", user_id, room_id,
                        index + 1, len(rooms))

            forgotten = yield self.store.did_forget(user_id, room_id)
            if forgotten:
                logger.info("[%s] User forgot room %d, ignoring", user_id,
                            room_id)
                continue

            if room_id not in rooms_user_has_been_in:
                # If we haven't been in the room then the filtering code below
                # won't return anything, so we need to handle these cases
                # explicitly.

                if room.membership == Membership.INVITE:
                    event_id = room.event_id
                    invite = yield self.store.get_event(event_id,
                                                        allow_none=True)
                    if invite:
                        invited_state = invite.unsigned["invite_room_state"]
                        writer.write_invite(room_id, invite, invited_state)

                continue

            # We only want to bother fetching events up to the last time they
            # were joined. We estimate that point by looking at the
            # stream_ordering of the last membership if it wasn't a join.
            if room.membership == Membership.JOIN:
                stream_ordering = yield self.store.get_room_max_stream_ordering(
                )
            else:
                stream_ordering = room.stream_ordering

            from_key = str(RoomStreamToken(0, 0))
            to_key = str(RoomStreamToken(None, stream_ordering))

            written_events = set()  # Events that we've processed in this room

            # We need to track gaps in the events stream so that we can then
            # write out the state at those events. We do this by keeping track
            # of events whose prev events we haven't seen.

            # Map from event ID to prev events that haven't been processed,
            # dict[str, set[str]].
            event_to_unseen_prevs = {}

            # The reverse mapping to above, i.e. map from unseen event to events
            # that have the unseen event in their prev_events, i.e. the unseen
            # events "children". dict[str, set[str]]
            unseen_to_child_events = {}

            # We fetch events in the room the user could see by fetching *all*
            # events that we have and then filtering, this isn't the most
            # efficient method perhaps but it does guarantee we get everything.
            while True:
                events, _ = yield self.store.paginate_room_events(
                    room_id, from_key, to_key, limit=100, direction="f")
                if not events:
                    break

                from_key = events[-1].internal_metadata.after

                events = yield filter_events_for_client(
                    self.store, user_id, events)

                writer.write_events(room_id, events)

                # Update the extremity tracking dicts
                for event in events:
                    # Check if we have any prev events that haven't been
                    # processed yet, and add those to the appropriate dicts.
                    unseen_events = set(
                        event.prev_event_ids()) - written_events
                    if unseen_events:
                        event_to_unseen_prevs[event.event_id] = unseen_events
                        for unseen in unseen_events:
                            unseen_to_child_events.setdefault(
                                unseen, set()).add(event.event_id)

                    # Now check if this event is an unseen prev event, if so
                    # then we remove this event from the appropriate dicts.
                    for child_id in unseen_to_child_events.pop(
                            event.event_id, []):
                        event_to_unseen_prevs[child_id].discard(event.event_id)

                    written_events.add(event.event_id)

                logger.info("Written %d events in room %s",
                            len(written_events), room_id)

            # Extremities are the events that have at least one unseen prev event.
            extremities = (
                event_id
                for event_id, unseen_prevs in event_to_unseen_prevs.items()
                if unseen_prevs)
            for event_id in extremities:
                if not event_to_unseen_prevs[event_id]:
                    continue
                state = yield self.store.get_state_for_event(event_id)
                writer.write_state(room_id, event_id, state)

        defer.returnValue(writer.finished())
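The export walks the room's event stream in order and records, for each event, which of its `prev_events` have not yet been written; any event still holding unseen prev events at the end sits just after a gap, and the state at that point is exported. A standalone sketch of that bookkeeping over plain `(event_id, prev_event_ids)` pairs:

def find_gap_extremities(event_stream):
    # event_stream yields (event_id, prev_event_ids) in the order events are written.
    written = set()
    event_to_unseen_prevs = {}  # event_id -> prev events not yet written
    unseen_to_children = {}     # unseen prev event -> events that reference it

    for event_id, prev_event_ids in event_stream:
        unseen = set(prev_event_ids) - written
        if unseen:
            event_to_unseen_prevs[event_id] = unseen
            for prev in unseen:
                unseen_to_children.setdefault(prev, set()).add(event_id)

        # If this event was itself an unseen prev of an earlier event, that
        # earlier event's gap is now (at least partially) filled in.
        for child_id in unseen_to_children.pop(event_id, ()):
            event_to_unseen_prevs[child_id].discard(event_id)

        written.add(event_id)

    # Events that still have unseen prev events mark the gaps in the stream.
    return [eid for eid, unseen in event_to_unseen_prevs.items() if unseen]


# e.g. find_gap_extremities([("B", ["A"]), ("C", ["B"])]) -> ["B"]
#      ("A" was never written, so "B" sits just after a gap)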
Beispiel #29
0
        def handle_room(event):
            d = {
                "room_id": event.room_id,
                "membership": event.membership,
                "visibility": (
                    "public" if event.room_id in public_room_ids else "private"
                ),
            }

            if event.membership == Membership.INVITE:
                time_now = self.clock.time_msec()
                d["inviter"] = event.sender

                invite_event = yield self.store.get_event(event.event_id)
                d["invite"] = yield self._event_serializer.serialize_event(
                    invite_event,
                    time_now,
                    as_client_event,
                )

            rooms_ret.append(d)

            if event.membership not in (Membership.JOIN, Membership.LEAVE):
                return

            try:
                if event.membership == Membership.JOIN:
                    room_end_token = now_token.room_key
                    deferred_room_state = run_in_background(
                        self.state_handler.get_current_state,
                        event.room_id,
                    )
                elif event.membership == Membership.LEAVE:
                    room_end_token = "s%d" % (event.stream_ordering, )
                    deferred_room_state = run_in_background(
                        self.store.get_state_for_events,
                        [event.event_id],
                    )
                    deferred_room_state.addCallback(
                        lambda states: states[event.event_id])

                (messages,
                 token), current_state = yield make_deferred_yieldable(
                     defer.gatherResults([
                         run_in_background(
                             self.store.get_recent_events_for_room,
                             event.room_id,
                             limit=limit,
                             end_token=room_end_token,
                         ),
                         deferred_room_state,
                     ])).addErrback(unwrapFirstError)

                messages = yield filter_events_for_client(
                    self.store, user_id, messages)

                start_token = now_token.copy_and_replace("room_key", token)
                end_token = now_token.copy_and_replace("room_key",
                                                       room_end_token)
                time_now = self.clock.time_msec()

                d["messages"] = {
                    "chunk": (yield self._event_serializer.serialize_events(
                        messages,
                        time_now=time_now,
                        as_client_event=as_client_event,
                    )),
                    "start":
                    start_token.to_string(),
                    "end":
                    end_token.to_string(),
                }

                d["state"] = yield self._event_serializer.serialize_events(
                    current_state.values(),
                    time_now=time_now,
                    as_client_event=as_client_event)

                account_data_events = []
                tags = tags_by_room.get(event.room_id)
                if tags:
                    account_data_events.append({
                        "type": "m.tag",
                        "content": {
                            "tags": tags
                        },
                    })

                account_data = account_data_by_room.get(event.room_id, {})
                for account_data_type, content in account_data.items():
                    account_data_events.append({
                        "type": account_data_type,
                        "content": content,
                    })

                d["account_data"] = account_data_events
            except Exception:
                logger.exception("Failed to get snapshot")
Beispiel #30
0
        def handle_room(event):
            d = {
                "room_id": event.room_id,
                "membership": event.membership,
                "visibility": (
                    "public" if event.room_id in public_room_ids
                    else "private"
                ),
            }

            if event.membership == Membership.INVITE:
                time_now = self.clock.time_msec()
                d["inviter"] = event.sender

                invite_event = yield self.store.get_event(event.event_id)
                d["invite"] = serialize_event(invite_event, time_now, as_client_event)

            rooms_ret.append(d)

            if event.membership not in (Membership.JOIN, Membership.LEAVE):
                return

            try:
                if event.membership == Membership.JOIN:
                    room_end_token = now_token.room_key
                    deferred_room_state = run_in_background(
                        self.state_handler.get_current_state,
                        event.room_id,
                    )
                elif event.membership == Membership.LEAVE:
                    room_end_token = "s%d" % (event.stream_ordering,)
                    deferred_room_state = run_in_background(
                        self.store.get_state_for_events,
                        [event.event_id],
                    )
                    deferred_room_state.addCallback(
                        lambda states: states[event.event_id]
                    )

                (messages, token), current_state = yield make_deferred_yieldable(
                    defer.gatherResults(
                        [
                            run_in_background(
                                self.store.get_recent_events_for_room,
                                event.room_id,
                                limit=limit,
                                end_token=room_end_token,
                            ),
                            deferred_room_state,
                        ]
                    )
                ).addErrback(unwrapFirstError)

                messages = yield filter_events_for_client(
                    self.store, user_id, messages
                )

                start_token = now_token.copy_and_replace("room_key", token)
                end_token = now_token.copy_and_replace("room_key", room_end_token)
                time_now = self.clock.time_msec()

                d["messages"] = {
                    "chunk": [
                        serialize_event(m, time_now, as_client_event)
                        for m in messages
                    ],
                    "start": start_token.to_string(),
                    "end": end_token.to_string(),
                }

                d["state"] = [
                    serialize_event(c, time_now, as_client_event)
                    for c in current_state.values()
                ]

                account_data_events = []
                tags = tags_by_room.get(event.room_id)
                if tags:
                    account_data_events.append({
                        "type": "m.tag",
                        "content": {"tags": tags},
                    })

                account_data = account_data_by_room.get(event.room_id, {})
                for account_data_type, content in account_data.items():
                    account_data_events.append({
                        "type": account_data_type,
                        "content": content,
                    })

                d["account_data"] = account_data_events
            except Exception:
                logger.exception("Failed to get snapshot")
Beispiel #31
0
    def get_messages(self,
                     requester,
                     room_id=None,
                     pagin_config=None,
                     as_client_event=True,
                     event_filter=None):
        """Get messages in a room.

        Args:
            requester (Requester): The user requesting messages.
            room_id (str): The room they want messages from.
            pagin_config (synapse.api.streams.PaginationConfig): The pagination
                config rules to apply, if any.
            as_client_event (bool): True to get events in client-server format.
            event_filter (Filter): Filter to apply to results or None
        Returns:
            dict: Pagination API results
        """
        user_id = requester.user.to_string()

        if pagin_config.from_token:
            room_token = pagin_config.from_token.room_key
        else:
            pagin_config.from_token = (
                yield self.hs.get_event_sources().get_current_token_for_room(
                    room_id=room_id))
            room_token = pagin_config.from_token.room_key

        room_token = RoomStreamToken.parse(room_token)

        pagin_config.from_token = pagin_config.from_token.copy_and_replace(
            "room_key", str(room_token))

        source_config = pagin_config.get_source_config("room")

        with (yield self.pagination_lock.read(room_id)):
            membership, member_event_id = yield self._check_in_room_or_world_readable(
                room_id, user_id)

            if source_config.direction == 'b':
                # if we're going backwards, we might need to backfill. This
                # requires that we have a topo token.
                if room_token.topological:
                    max_topo = room_token.topological
                else:
                    max_topo = yield self.store.get_max_topological_token(
                        room_id, room_token.stream)

                if membership == Membership.LEAVE:
                    # If they have left the room then clamp the token to be before
                    # they left the room, to save the effort of loading from the
                    # database.
                    leave_token = yield self.store.get_topological_token_for_event(
                        member_event_id)
                    leave_token = RoomStreamToken.parse(leave_token)
                    if leave_token.topological < max_topo:
                        source_config.from_key = str(leave_token)

                yield self.hs.get_handlers().federation_handler.maybe_backfill(
                    room_id, max_topo)

            events, next_key = yield self.store.paginate_room_events(
                room_id=room_id,
                from_key=source_config.from_key,
                to_key=source_config.to_key,
                direction=source_config.direction,
                limit=source_config.limit,
                event_filter=event_filter,
            )

            next_token = pagin_config.from_token.copy_and_replace(
                "room_key", next_key)

        if not events:
            defer.returnValue({
                "chunk": [],
                "start": pagin_config.from_token.to_string(),
                "end": next_token.to_string(),
            })

        if event_filter:
            events = event_filter.filter(events)

        events = yield filter_events_for_client(
            self.store,
            user_id,
            events,
            is_peeking=(member_event_id is None),
        )

        time_now = self.clock.time_msec()

        chunk = {
            "chunk": [
                serialize_event(e, time_now, as_client_event) for e in events
            ],
            "start": pagin_config.from_token.to_string(),
            "end": next_token.to_string(),
        }

        defer.returnValue(chunk)
Beispiel #32
0
    def test_out_of_band_invite_rejection(self):
        # this is where we have received an invite event over federation, and then
        # rejected it.
        invite_pdu = {
            "room_id": "!room:id",
            "depth": 1,
            "auth_events": [],
            "prev_events": [],
            "origin_server_ts": 1,
            "sender": "@someone:" + self.OTHER_SERVER_NAME,
            "type": "m.room.member",
            "state_key": "@user:test",
            "content": {
                "membership": "invite"
            },
        }
        self.add_hashes_and_signatures(invite_pdu)
        invite_event_id = make_event_from_dict(invite_pdu,
                                               RoomVersions.V9).event_id

        self.get_success(self.hs.get_federation_server().on_invite_request(
            self.OTHER_SERVER_NAME,
            invite_pdu,
            "9",
        ))

        # stub out do_remotely_reject_invite so that we fall back to a locally-
        # generated rejection
        with patch.object(
                self.hs.get_federation_handler(),
                "do_remotely_reject_invite",
                side_effect=Exception(),
        ):
            reject_event_id, _ = self.get_success(
                self.hs.get_room_member_handler().remote_reject_invite(
                    invite_event_id,
                    txn_id=None,
                    requester=create_requester("@user:test"),
                    content={},
                ))

        invite_event, reject_event = self.get_success(
            self.hs.get_datastores().main.get_events_as_list(
                [invite_event_id, reject_event_id]))

        # the invited user should be able to see both the invite and the rejection
        self.assertEqual(
            self.get_success(
                filter_events_for_client(
                    self.hs.get_storage_controllers(),
                    "@user:test",
                    [invite_event, reject_event],
                )),
            [invite_event, reject_event],
        )

        # other users should see neither
        self.assertEqual(
            self.get_success(
                filter_events_for_client(
                    self.hs.get_storage_controllers(),
                    "@other:test",
                    [invite_event, reject_event],
                )),
            [],
        )