def on_GET(self, request, room_id, event_id):
    requester = yield self.auth.get_user_by_req(request, allow_guest=True)

    limit = int(request.args.get("limit", [10])[0])

    results = yield self.handlers.room_context_handler.get_event_context(
        requester.user,
        room_id,
        event_id,
        limit,
        requester.is_guest,
    )

    if not results:
        raise SynapseError(
            404, "Event not found.", errcode=Codes.NOT_FOUND
        )

    time_now = self.clock.time_msec()
    results["events_before"] = [
        serialize_event(event, time_now) for event in results["events_before"]
    ]
    results["event"] = serialize_event(results["event"], time_now)
    results["events_after"] = [
        serialize_event(event, time_now) for event in results["events_after"]
    ]
    results["state"] = [
        serialize_event(event, time_now) for event in results["state"]
    ]

    defer.returnValue((200, results))
def on_GET(self, request, room_id, event_id):
    requester = yield self.auth.get_user_by_req(request, allow_guest=True)

    limit = int(request.args.get("limit", [10])[0])

    results = yield self.handlers.room_context_handler.get_event_context(
        requester.user,
        room_id,
        event_id,
        limit,
    )

    if not results:
        raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND)

    time_now = self.clock.time_msec()
    results["events_before"] = [
        serialize_event(event, time_now) for event in results["events_before"]
    ]
    results["event"] = serialize_event(results["event"], time_now)
    results["events_after"] = [
        serialize_event(event, time_now) for event in results["events_after"]
    ]
    results["state"] = [
        serialize_event(event, time_now) for event in results["state"]
    ]

    defer.returnValue((200, results))
def on_GET(self, request, room_id, event_id):
    user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)

    limit = int(request.args.get("limit", [10])[0])

    results = yield self.handlers.room_context_handler.get_event_context(
        user, room_id, event_id, limit, is_guest
    )

    time_now = self.clock.time_msec()
    results["events_before"] = [
        serialize_event(event, time_now) for event in results["events_before"]
    ]
    results["events_after"] = [
        serialize_event(event, time_now) for event in results["events_after"]
    ]
    results["state"] = [
        serialize_event(event, time_now) for event in results["state"]
    ]

    logger.info("Responding with %r", results)

    defer.returnValue((200, results))
def encode_room(room, filter, time_now, token_id):
    event_map = {}
    state_events = filter.filter_room_state(room.state)
    recent_events = filter.filter_room_events(room.events)
    state_event_ids = []
    recent_event_ids = []

    for event in state_events:
        # TODO(mjark): Respect formatting requirements in the filter.
        event_map[event.event_id] = serialize_event(
            event, time_now, token_id=token_id,
            event_format=format_event_for_client_v2_without_event_id,
        )
        state_event_ids.append(event.event_id)

    for event in recent_events:
        # TODO(mjark): Respect formatting requirements in the filter.
        event_map[event.event_id] = serialize_event(
            event, time_now, token_id=token_id,
            event_format=format_event_for_client_v2_without_event_id,
        )
        recent_event_ids.append(event.event_id)

    result = {
        "room_id": room.room_id,
        "event_map": event_map,
        "events": {
            "batch": recent_event_ids,
            "prev_batch": room.prev_batch.to_string(),
        },
        "state": state_event_ids,
        "limited": room.limited,
        "published": room.published,
        "ephemeral": room.ephemeral,
    }
    return result
def handle_room(event):
    d = {
        "room_id": event.room_id,
        "membership": event.membership,
        "visibility": (
            "public" if event.room_id in public_room_ids
            else "private"
        ),
    }

    if event.membership == Membership.INVITE:
        d["inviter"] = event.sender

    rooms_ret.append(d)

    if event.membership != Membership.JOIN:
        return

    try:
        (messages, token), current_state = yield defer.gatherResults(
            [
                self.store.get_recent_events_for_room(
                    event.room_id,
                    limit=limit,
                    end_token=now_token.room_key,
                ),
                self.state_handler.get_current_state(
                    event.room_id
                ),
            ]
        ).addErrback(unwrapFirstError)

        messages = yield self._filter_events_for_client(
            user_id, event.room_id, messages
        )

        start_token = now_token.copy_and_replace("room_key", token[0])
        end_token = now_token.copy_and_replace("room_key", token[1])

        time_now = self.clock.time_msec()

        d["messages"] = {
            "chunk": [
                serialize_event(m, time_now, as_client_event)
                for m in messages
            ],
            "start": start_token.to_string(),
            "end": end_token.to_string(),
        }

        d["state"] = [
            serialize_event(c, time_now, as_client_event)
            for c in current_state.values()
        ]
    except:
        logger.exception("Failed to get snapshot")
def encode_invited(self, rooms, time_now, token_id):
    """
    Encode the invited rooms in a sync result

    Args:
        rooms(list[synapse.handlers.sync.InvitedSyncResult]): list of
            sync results for rooms this user is joined to
        time_now(int): current time - used as a baseline for age
            calculations
        token_id(int): ID of the user's auth token - used for namespacing
            of transaction IDs

    Returns:
        dict[str, dict[str, object]]: the invited rooms list, in our
            response format
    """
    invited = {}
    for room in rooms:
        invite = serialize_event(
            room.invite, time_now, token_id=token_id,
            event_format=format_event_for_client_v2_without_room_id,
        )
        unsigned = dict(invite.get("unsigned", {}))
        invite["unsigned"] = unsigned
        invited_state = list(unsigned.pop("invite_room_state", []))
        invited_state.append(invite)
        invited[room.room_id] = {"invite_state": {"events": invited_state}}

    return invited
def encode_invited(self, rooms, filter, time_now, token_id):
    """
    Encode the invited rooms in a sync result

    :param list[synapse.handlers.sync.InvitedSyncResult] rooms: list of
        sync results for rooms this user is joined to
    :param FilterCollection filter: filters to apply to the results
    :param int time_now: current time - used as a baseline for age
        calculations
    :param int token_id: ID of the user's auth token - used for namespacing
        of transaction IDs

    :return: the invited rooms list, in our response format
    :rtype: dict[str, dict[str, object]]
    """
    invited = {}
    for room in rooms:
        invite = serialize_event(
            room.invite, time_now, token_id=token_id,
            event_format=format_event_for_client_v2_without_room_id,
        )
        invited_state = invite.get("unsigned", {}).pop("invite_room_state", [])
        invited_state.append(invite)
        invited[room.room_id] = {
            "invite_state": {"events": invited_state}
        }

    return invited
def serialize(event):
    # TODO(mjark): Respect formatting requirements in the filter.
    return serialize_event(
        event, time_now, token_id=token_id,
        event_format=format_event_for_client_v2_without_room_id,
        only_event_fields=only_fields,
    )
def get_room_members_as_pagination_chunk(self, room_id=None, user_id=None,
                                         limit=0, start_tok=None,
                                         end_tok=None):
    """Retrieve a list of room members in the room.

    Args:
        room_id (str): The room to get the member list for.
        user_id (str): The ID of the user making the request.
        limit (int): The max number of members to return.
        start_tok (str): Optional. The start token if known.
        end_tok (str): Optional. The end token if known.
    Returns:
        dict: A Pagination streamable dict.
    Raises:
        SynapseError if something goes wrong.
    """
    yield self.auth.check_joined_room(room_id, user_id)

    member_list = yield self.store.get_room_members(room_id=room_id)
    time_now = self.clock.time_msec()
    event_list = [
        serialize_event(entry, time_now)
        for entry in member_list
    ]
    chunk_data = {
        "start": "START",  # FIXME (erikj): START is no longer valid
        "end": "END",
        "chunk": event_list
    }
    # TODO honor Pagination stream params
    # TODO snapshot this list to return on subsequent requests when
    # paginating
    defer.returnValue(chunk_data)
def encode_invited(rooms, time_now, token_id):
    """
    Encode the invited rooms in a sync result

    Args:
        rooms(list[synapse.handlers.sync.InvitedSyncResult]): list of
            sync results for rooms this user is joined to
        time_now(int): current time - used as a baseline for age
            calculations
        token_id(int): ID of the user's auth token - used for namespacing
            of transaction IDs

    Returns:
        dict[str, dict[str, object]]: the invited rooms list, in our
            response format
    """
    invited = {}
    for room in rooms:
        invite = serialize_event(
            room.invite, time_now, token_id=token_id,
            event_format=format_event_for_client_v2_without_room_id,
            is_invite=True,
        )
        unsigned = dict(invite.get("unsigned", {}))
        invite["unsigned"] = unsigned
        invited_state = list(unsigned.pop("invite_room_state", []))
        invited_state.append(invite)
        invited[room.room_id] = {
            "invite_state": {"events": invited_state}
        }

    return invited
def get_state_events(self, user_id, room_id, is_guest=False):
    """Retrieve all state events for a given room. If the user is
    joined to the room then return the current state. If the user has
    left the room return the state events from when they left.

    Args:
        user_id(str): The user requesting state events.
        room_id(str): The room ID to get all state events from.
    Returns:
        A list of dicts representing state events. [{}, {}, {}]
    """
    membership, membership_event_id = yield self._check_in_room_or_world_readable(
        room_id, user_id
    )

    if membership == Membership.JOIN:
        room_state = yield self.state_handler.get_current_state(room_id)
    elif membership == Membership.LEAVE:
        room_state = yield self.store.get_state_for_events(
            [membership_event_id], None
        )
        room_state = room_state[membership_event_id]

    now = self.clock.time_msec()
    defer.returnValue(
        [serialize_event(c, now) for c in room_state.values()]
    )
def serialize(event):
    return serialize_event(
        event, time_now, token_id=token_id,
        event_format=event_formatter,
        only_event_fields=only_fields,
    )
def on_GET(self, request):
    requester = yield self.auth.get_user_by_req(request)
    user_id = requester.user.to_string()

    from_token = parse_string(request, "from", required=False)
    limit = parse_integer(request, "limit", default=50)
    only = parse_string(request, "only", required=False)

    limit = min(limit, 500)

    push_actions = yield self.store.get_push_actions_for_user(
        user_id, from_token, limit, only_highlight=(only == "highlight")
    )

    receipts_by_room = yield self.store.get_receipts_for_user_with_orderings(
        user_id, 'm.read'
    )

    notif_event_ids = [pa["event_id"] for pa in push_actions]
    notif_events = yield self.store.get_events(notif_event_ids)

    returned_push_actions = []

    next_token = None

    for pa in push_actions:
        returned_pa = {
            "room_id": pa["room_id"],
            "profile_tag": pa["profile_tag"],
            "actions": pa["actions"],
            "ts": pa["received_ts"],
            "event": serialize_event(
                notif_events[pa["event_id"]],
                self.clock.time_msec(),
                event_format=format_event_for_client_v2_without_room_id,
            ),
        }

        if pa["room_id"] not in receipts_by_room:
            returned_pa["read"] = False
        else:
            receipt = receipts_by_room[pa["room_id"]]

            returned_pa["read"] = (
                receipt["topological_ordering"], receipt["stream_ordering"]
            ) >= (
                pa["topological_ordering"], pa["stream_ordering"]
            )

        returned_push_actions.append(returned_pa)
        next_token = pa["stream_ordering"]

    defer.returnValue((200, {
        "notifications": returned_push_actions,
        "next_token": next_token,
    }))
def on_GET(self, request, room_id, event_id):
    requester = yield self.auth.get_user_by_req(request)
    event = yield self.event_handler.get_event(requester.user, event_id)

    time_now = self.clock.time_msec()
    if event:
        defer.returnValue((200, serialize_event(event, time_now)))
    else:
        defer.returnValue((404, "Event not found."))
def on_GET(self, request, room_id, event_id):
    requester = yield self.auth.get_user_by_req(request, allow_guest=True)
    event = yield self.event_handler.get_event(requester.user, room_id, event_id)

    time_now = self.clock.time_msec()
    if event:
        defer.returnValue((200, serialize_event(event, time_now)))
    else:
        defer.returnValue((404, "Event not found."))
def get_stream(self, auth_user_id, pagin_config, timeout=0, as_client_event=True, affect_presence=True, only_room_events=False, room_id=None, is_guest=False): """Fetches the events stream for a given user. If `only_room_events` is `True` only room events will be returned. """ auth_user = UserID.from_string(auth_user_id) try: if affect_presence: yield self.started_stream(auth_user) if timeout: # If they've set a timeout set a minimum limit. timeout = max(timeout, 500) # Add some randomness to this value to try and mitigate against # thundering herds on restart. timeout = random.randint(int(timeout * 0.9), int(timeout * 1.1)) if is_guest: yield user_joined_room(self.distributor, auth_user, room_id) events, tokens = yield self.notifier.get_events_for( auth_user, pagin_config, timeout, only_room_events=only_room_events, is_guest=is_guest, guest_room_id=room_id) time_now = self.clock.time_msec() chunks = [ serialize_event(e, time_now, as_client_event) for e in events ] chunk = { "chunk": chunks, "start": tokens[0].to_string(), "end": tokens[1].to_string(), } defer.returnValue(chunk) finally: if affect_presence: self.stopped_stream(auth_user)
def _room_initial_sync_parted(self, user_id, room_id, pagin_config,
                              membership, member_event_id, is_peeking):
    room_state = yield self.store.get_state_for_events(
        [member_event_id],
    )

    room_state = room_state[member_event_id]

    limit = pagin_config.limit if pagin_config else None
    if limit is None:
        limit = 10

    stream_token = yield self.store.get_stream_token_for_event(
        member_event_id
    )

    messages, token = yield self.store.get_recent_events_for_room(
        room_id,
        limit=limit,
        end_token=stream_token
    )

    messages = yield filter_events_for_client(
        self.store, user_id, messages, is_peeking=is_peeking
    )

    start_token = StreamToken.START.copy_and_replace("room_key", token)
    end_token = StreamToken.START.copy_and_replace("room_key", stream_token)

    time_now = self.clock.time_msec()

    defer.returnValue({
        "membership": membership,
        "room_id": room_id,
        "messages": {
            "chunk": [serialize_event(m, time_now) for m in messages],
            "start": start_token.to_string(),
            "end": end_token.to_string(),
        },
        "state": [serialize_event(s, time_now) for s in room_state.values()],
        "presence": [],
        "receipts": [],
    })
def _room_initial_sync_parted(self, user_id, room_id, pagin_config,
                              membership, member_event_id, is_peeking):
    room_state = yield self.store.get_state_for_events(
        [member_event_id], None
    )

    room_state = room_state[member_event_id]

    limit = pagin_config.limit if pagin_config else None
    if limit is None:
        limit = 10

    stream_token = yield self.store.get_stream_token_for_event(
        member_event_id
    )

    messages, token = yield self.store.get_recent_events_for_room(
        room_id,
        limit=limit,
        end_token=stream_token
    )

    messages = yield self._filter_events_for_client(
        user_id, messages, is_peeking=is_peeking
    )

    start_token = StreamToken.START.copy_and_replace("room_key", token[0])
    end_token = StreamToken.START.copy_and_replace("room_key", token[1])

    time_now = self.clock.time_msec()

    defer.returnValue({
        "membership": membership,
        "room_id": room_id,
        "messages": {
            "chunk": [serialize_event(m, time_now) for m in messages],
            "start": start_token.to_string(),
            "end": end_token.to_string(),
        },
        "state": [serialize_event(s, time_now) for s in room_state.values()],
        "presence": [],
        "receipts": [],
    })
def on_GET(self, request, event_id):
    auth_user, _, _ = yield self.auth.get_user_by_req(request)

    handler = self.handlers.event_handler
    event = yield handler.get_event(auth_user, event_id)

    time_now = self.clock.time_msec()
    if event:
        defer.returnValue((200, serialize_event(event, time_now)))
    else:
        defer.returnValue((404, "Event not found."))
def on_GET(self, request): requester = yield self.auth.get_user_by_req(request) user_id = requester.user.to_string() from_token = parse_string(request, "from", required=False) limit = parse_integer(request, "limit", default=50) only = parse_string(request, "only", required=False) limit = min(limit, 500) push_actions = yield self.store.get_push_actions_for_user( user_id, from_token, limit, only_highlight=(only == "highlight") ) receipts_by_room = yield self.store.get_receipts_for_user_with_orderings( user_id, 'm.read' ) notif_event_ids = [pa["event_id"] for pa in push_actions] notif_events = yield self.store.get_events(notif_event_ids) returned_push_actions = [] next_token = None for pa in push_actions: returned_pa = { "room_id": pa["room_id"], "profile_tag": pa["profile_tag"], "actions": pa["actions"], "ts": pa["received_ts"], "event": serialize_event( notif_events[pa["event_id"]], self.clock.time_msec(), event_format=format_event_for_client_v2_without_room_id, ), } if pa["room_id"] not in receipts_by_room: returned_pa["read"] = False else: receipt = receipts_by_room[pa["room_id"]] returned_pa["read"] = ( receipt["topological_ordering"], receipt["stream_ordering"] ) >= ( pa["topological_ordering"], pa["stream_ordering"] ) returned_push_actions.append(returned_pa) next_token = str(pa["stream_ordering"]) defer.returnValue((200, { "notifications": returned_push_actions, "next_token": next_token, }))
def get_messages(self, user_id=None, room_id=None, pagin_config=None, feedback=False, as_client_event=True): """Get messages in a room. Args: user_id (str): The user requesting messages. room_id (str): The room they want messages from. pagin_config (synapse.api.streams.PaginationConfig): The pagination config rules to apply, if any. feedback (bool): True to get compressed feedback with the messages as_client_event (bool): True to get events in client-server format. Returns: dict: Pagination API results """ yield self.auth.check_joined_room(room_id, user_id) data_source = self.hs.get_event_sources().sources["room"] if not pagin_config.from_token: pagin_config.from_token = ( yield self.hs.get_event_sources().get_current_token( direction='b' ) ) room_token = RoomStreamToken.parse(pagin_config.from_token.room_key) if room_token.topological is None: raise SynapseError(400, "Invalid token") yield self.hs.get_handlers().federation_handler.maybe_backfill( room_id, room_token.topological ) user = UserID.from_string(user_id) events, next_key = yield data_source.get_pagination_rows( user, pagin_config.get_source_config("room"), room_id ) next_token = pagin_config.from_token.copy_and_replace( "room_key", next_key ) time_now = self.clock.time_msec() chunk = { "chunk": [ serialize_event(e, time_now, as_client_event) for e in events ], "start": pagin_config.from_token.to_string(), "end": next_token.to_string(), } defer.returnValue(chunk)
def on_GET(self, request, room_id, event_id):
    requester = yield self.auth.get_user_by_req(request, allow_guest=True)

    limit = parse_integer(request, "limit", default=10)

    # picking the API shape for symmetry with /messages
    filter_bytes = parse_string(request, "filter")
    if filter_bytes:
        filter_json = urlparse.unquote(filter_bytes)
        event_filter = Filter(json.loads(filter_json))
    else:
        event_filter = None

    results = yield self.room_context_handler.get_event_context(
        requester.user,
        room_id,
        event_id,
        limit,
        event_filter,
    )

    if not results:
        raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND)

    time_now = self.clock.time_msec()
    results["events_before"] = [
        serialize_event(event, time_now) for event in results["events_before"]
    ]
    results["event"] = serialize_event(results["event"], time_now)
    results["events_after"] = [
        serialize_event(event, time_now) for event in results["events_after"]
    ]
    results["state"] = [
        serialize_event(event, time_now) for event in results["state"]
    ]

    defer.returnValue((200, results))
def _serialize(self, service, events):
    time_now = self.clock.time_msec()
    return [
        serialize_event(
            e, time_now, as_client_event=True,
            is_invite=(
                e.type == EventTypes.Member
                and e.membership == "invite"
                and service.is_interested_in_user(e.state_key)
            ),
        )
        for e in events
    ]
def on_GET(self, request, room_id):
    remote_server = urllib.unquote(
        request.args["remote"][0]
    ).decode("UTF-8")
    limit = int(request.args["limit"][0])

    handler = self.handlers.federation_handler
    events = yield handler.backfill(remote_server, room_id, limit)

    time_now = self.clock.time_msec()

    res = [serialize_event(event, time_now) for event in events]
    defer.returnValue((200, res))
def get_stream(self, auth_user_id, pagin_config, timeout=0, as_client_event=True, affect_presence=True, only_room_events=False, room_id=None, is_guest=False): """Fetches the events stream for a given user. If `only_room_events` is `True` only room events will be returned. """ auth_user = UserID.from_string(auth_user_id) try: if affect_presence: yield self.started_stream(auth_user) if timeout: # If they've set a timeout set a minimum limit. timeout = max(timeout, 500) # Add some randomness to this value to try and mitigate against # thundering herds on restart. timeout = random.randint(int(timeout*0.9), int(timeout*1.1)) if is_guest: yield self.distributor.fire( "user_joined_room", user=auth_user, room_id=room_id ) events, tokens = yield self.notifier.get_events_for( auth_user, pagin_config, timeout, only_room_events=only_room_events, is_guest=is_guest, guest_room_id=room_id ) time_now = self.clock.time_msec() chunks = [ serialize_event(e, time_now, as_client_event) for e in events ] chunk = { "chunk": chunks, "start": tokens[0].to_string(), "end": tokens[1].to_string(), } defer.returnValue(chunk) finally: if affect_presence: self.stopped_stream(auth_user)
def _serialize(self, service, events):
    time_now = self.clock.time_msec()
    return [
        serialize_event(
            e, time_now, as_client_event=True,
            # If this is an invite or a knock membership event, and we're interested
            # in this user, then include any stripped state alongside the event.
            include_stripped_room_state=(
                e.type == EventTypes.Member
                and (
                    e.membership == Membership.INVITE
                    or e.membership == Membership.KNOCK
                )
                and service.is_interested_in_user(e.state_key)
            ),
        )
        for e in events
    ]
def get_state_events(self, user_id, room_id):
    """Retrieve all state events for a given room.

    Args:
        user_id(str): The user requesting state events.
        room_id(str): The room ID to get all state events from.
    Returns:
        A list of dicts representing state events. [{}, {}, {}]
    """
    yield self.auth.check_joined_room(room_id, user_id)

    # TODO: This is duplicating logic from snapshot_all_rooms
    current_state = yield self.state_handler.get_current_state(room_id)
    now = self.clock.time_msec()
    defer.returnValue(
        [serialize_event(c, now) for c in current_state.values()]
    )
def _serialize(self, service: "ApplicationService", events: Iterable[EventBase]) -> List[JsonDict]: time_now = self.clock.time_msec() return [ serialize_event( e, time_now, config=SerializeEventConfig( as_client_event=True, # If this is an invite or a knock membership event, and we're interested # in this user, then include any stripped state alongside the event. include_stripped_room_state=( e.type == EventTypes.Member and (e.membership == Membership.INVITE or e.membership == Membership.KNOCK) and service.is_interested_in_user(e.state_key)), ), ) for e in events ]
def get_stream(self, auth_user_id, pagin_config, timeout=0, as_client_event=True, affect_presence=True): auth_user = UserID.from_string(auth_user_id) try: if affect_presence: if auth_user not in self._streams_per_user: self._streams_per_user[auth_user] = 0 if auth_user in self._stop_timer_per_user: try: self.clock.cancel_call_later( self._stop_timer_per_user.pop(auth_user)) except: logger.exception("Failed to cancel event timer") else: yield self.distributor.fire("started_user_eventstream", auth_user) self._streams_per_user[auth_user] += 1 rm_handler = self.hs.get_handlers().room_member_handler room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user) if timeout: # If they've set a timeout set a minimum limit. timeout = max(timeout, 500) # Add some randomness to this value to try and mitigate against # thundering herds on restart. timeout = random.randint(int(timeout * 0.9), int(timeout * 1.1)) events, tokens = yield self.notifier.get_events_for( auth_user, room_ids, pagin_config, timeout) time_now = self.clock.time_msec() chunks = [ serialize_event(e, time_now, as_client_event) for e in events ] chunk = { "chunk": chunks, "start": tokens[0].to_string(), "end": tokens[1].to_string(), } defer.returnValue(chunk) finally: if affect_presence: self._streams_per_user[auth_user] -= 1 if not self._streams_per_user[auth_user]: del self._streams_per_user[auth_user] # 10 seconds of grace to allow the client to reconnect again # before we think they're gone def _later(): logger.debug("_later stopped_user_eventstream %s", auth_user) self._stop_timer_per_user.pop(auth_user, None) return self.distributor.fire( "stopped_user_eventstream", auth_user) logger.debug("Scheduling _later: for %s", auth_user) self._stop_timer_per_user[auth_user] = ( self.clock.call_later(30, _later))
def _room_initial_sync_joined(self, user_id, room_id, pagin_config, membership, is_guest): current_state = yield self.state.get_current_state( room_id=room_id, ) # TODO(paul): I wish I was called with user objects not user_id # strings... auth_user = UserID.from_string(user_id) # TODO: These concurrently time_now = self.clock.time_msec() state = [ serialize_event(x, time_now) for x in current_state.values() ] now_token = yield self.hs.get_event_sources().get_current_token() limit = pagin_config.limit if pagin_config else None if limit is None: limit = 10 room_members = [ m for m in current_state.values() if m.type == EventTypes.Member and m.content["membership"] == Membership.JOIN ] presence_handler = self.hs.get_handlers().presence_handler @defer.inlineCallbacks def get_presence(): states = {} if not is_guest: states = yield presence_handler.get_states( target_users=[UserID.from_string(m.user_id) for m in room_members], auth_user=auth_user, as_event=True, check_auth=False, ) defer.returnValue(states.values()) receipts_handler = self.hs.get_handlers().receipts_handler presence, receipts, (messages, token) = yield defer.gatherResults( [ get_presence(), receipts_handler.get_receipts_for_room(room_id, now_token.receipt_key), self.store.get_recent_events_for_room( room_id, limit=limit, end_token=now_token.room_key, ) ], consumeErrors=True, ).addErrback(unwrapFirstError) messages = yield self._filter_events_for_client( user_id, messages, is_guest=is_guest, require_all_visible_for_guests=False ) start_token = now_token.copy_and_replace("room_key", token[0]) end_token = now_token.copy_and_replace("room_key", token[1]) time_now = self.clock.time_msec() ret = { "room_id": room_id, "messages": { "chunk": [serialize_event(m, time_now) for m in messages], "start": start_token.to_string(), "end": end_token.to_string(), }, "state": state, "presence": presence, "receipts": receipts, } if not is_guest: ret["membership"] = membership defer.returnValue(ret)
def _serialize(self, events):
    time_now = self.clock.time_msec()
    return [
        serialize_event(e, time_now, as_client_event=True) for e in events
    ]
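# Illustrative sketch (not Synapse code): every snippet in this collection
# captures `time_now = self.clock.time_msec()` once and reuses it for the whole
# batch of events being serialized. Assuming each event dict carries an
# `age_ts` field recording when the server received it, a simplified
# serializer showing why a single timestamp matters might look like this.
def sketch_serialize_batch(events, time_now_ms):
    """Serialize a batch of event dicts, stamping a consistent relative age."""
    out = []
    for ev in events:
        d = dict(ev)
        unsigned = dict(d.get("unsigned", {}))
        if "age_ts" in unsigned:
            # Age is computed against the single timestamp taken before the
            # loop, so all events in one response age consistently; any
            # blocking work done after taking the timestamp would skew it.
            unsigned["age"] = time_now_ms - unsigned.pop("age_ts")
        d["unsigned"] = unsigned
        out.append(d)
    return out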
def get_messages(self, user_id=None, room_id=None, pagin_config=None, as_client_event=True, is_guest=False): """Get messages in a room. Args: user_id (str): The user requesting messages. room_id (str): The room they want messages from. pagin_config (synapse.api.streams.PaginationConfig): The pagination config rules to apply, if any. as_client_event (bool): True to get events in client-server format. is_guest (bool): Whether the requesting user is a guest (as opposed to a fully registered user). Returns: dict: Pagination API results """ data_source = self.hs.get_event_sources().sources["room"] if pagin_config.from_token: room_token = pagin_config.from_token.room_key else: pagin_config.from_token = ( yield self.hs.get_event_sources().get_current_token( direction='b' ) ) room_token = pagin_config.from_token.room_key room_token = RoomStreamToken.parse(room_token) if room_token.topological is None: raise SynapseError(400, "Invalid token") pagin_config.from_token = pagin_config.from_token.copy_and_replace( "room_key", str(room_token) ) source_config = pagin_config.get_source_config("room") if not is_guest: member_event = yield self.auth.check_user_was_in_room(room_id, user_id) if member_event.membership == Membership.LEAVE: # If they have left the room then clamp the token to be before # they left the room. # If they're a guest, we'll just 403 them if they're asking for # events they can't see. leave_token = yield self.store.get_topological_token_for_event( member_event.event_id ) leave_token = RoomStreamToken.parse(leave_token) if leave_token.topological < room_token.topological: source_config.from_key = str(leave_token) if source_config.direction == "f": if source_config.to_key is None: source_config.to_key = str(leave_token) else: to_token = RoomStreamToken.parse(source_config.to_key) if leave_token.topological < to_token.topological: source_config.to_key = str(leave_token) yield self.hs.get_handlers().federation_handler.maybe_backfill( room_id, room_token.topological ) user = UserID.from_string(user_id) events, next_key = yield data_source.get_pagination_rows( user, source_config, room_id ) next_token = pagin_config.from_token.copy_and_replace( "room_key", next_key ) if not events: defer.returnValue({ "chunk": [], "start": pagin_config.from_token.to_string(), "end": next_token.to_string(), }) events = yield self._filter_events_for_client(user_id, events, is_guest=is_guest) time_now = self.clock.time_msec() chunk = { "chunk": [ serialize_event(e, time_now, as_client_event) for e in events ], "start": pagin_config.from_token.to_string(), "end": next_token.to_string(), } defer.returnValue(chunk)
def get_messages(self, requester, room_id=None, pagin_config=None, as_client_event=True): """Get messages in a room. Args: requester (Requester): The user requesting messages. room_id (str): The room they want messages from. pagin_config (synapse.api.streams.PaginationConfig): The pagination config rules to apply, if any. as_client_event (bool): True to get events in client-server format. Returns: dict: Pagination API results """ user_id = requester.user.to_string() data_source = self.hs.get_event_sources().sources["room"] if pagin_config.from_token: room_token = pagin_config.from_token.room_key else: pagin_config.from_token = ( yield self.hs.get_event_sources().get_current_token( direction='b' ) ) room_token = pagin_config.from_token.room_key room_token = RoomStreamToken.parse(room_token) pagin_config.from_token = pagin_config.from_token.copy_and_replace( "room_key", str(room_token) ) source_config = pagin_config.get_source_config("room") membership, member_event_id = yield self._check_in_room_or_world_readable( room_id, user_id ) if source_config.direction == 'b': # if we're going backwards, we might need to backfill. This # requires that we have a topo token. if room_token.topological: max_topo = room_token.topological else: max_topo = yield self.store.get_max_topological_token_for_stream_and_room( room_id, room_token.stream ) if membership == Membership.LEAVE: # If they have left the room then clamp the token to be before # they left the room, to save the effort of loading from the # database. leave_token = yield self.store.get_topological_token_for_event( member_event_id ) leave_token = RoomStreamToken.parse(leave_token) if leave_token.topological < max_topo: source_config.from_key = str(leave_token) yield self.hs.get_handlers().federation_handler.maybe_backfill( room_id, max_topo ) events, next_key = yield data_source.get_pagination_rows( requester.user, source_config, room_id ) next_token = pagin_config.from_token.copy_and_replace( "room_key", next_key ) if not events: defer.returnValue({ "chunk": [], "start": pagin_config.from_token.to_string(), "end": next_token.to_string(), }) events = yield self._filter_events_for_client( user_id, events, is_peeking=(member_event_id is None), ) time_now = self.clock.time_msec() chunk = { "chunk": [ serialize_event(e, time_now, as_client_event) for e in events ], "start": pagin_config.from_token.to_string(), "end": next_token.to_string(), } defer.returnValue(chunk)
def search(self, user, content, batch=None): """Performs a full text search for a user. Args: user (UserID) content (dict): Search parameters batch (str): The next_batch parameter. Used for pagination. Returns: dict to be returned to the client with results of search """ batch_group = None batch_group_key = None batch_token = None if batch: try: b = decode_base64(batch) batch_group, batch_group_key, batch_token = b.split("\n") assert batch_group is not None assert batch_group_key is not None assert batch_token is not None except Exception: raise SynapseError(400, "Invalid batch") try: room_cat = content["search_categories"]["room_events"] # The actual thing to query in FTS search_term = room_cat["search_term"] # Which "keys" to search over in FTS query keys = room_cat.get("keys", [ "content.body", "content.name", "content.topic", ]) # Filter to apply to results filter_dict = room_cat.get("filter", {}) # What to order results by (impacts whether pagination can be doen) order_by = room_cat.get("order_by", "rank") # Return the current state of the rooms? include_state = room_cat.get("include_state", False) # Include context around each event? event_context = room_cat.get( "event_context", None ) # Group results together? May allow clients to paginate within a # group group_by = room_cat.get("groupings", {}).get("group_by", {}) group_keys = [g["key"] for g in group_by] if event_context is not None: before_limit = int(event_context.get( "before_limit", 5 )) after_limit = int(event_context.get( "after_limit", 5 )) # Return the historic display name and avatar for the senders # of the events? include_profile = bool(event_context.get("include_profile", False)) except KeyError: raise SynapseError(400, "Invalid search query") if order_by not in ("rank", "recent"): raise SynapseError(400, "Invalid order by: %r" % (order_by,)) if set(group_keys) - {"room_id", "sender"}: raise SynapseError( 400, "Invalid group by keys: %r" % (set(group_keys) - {"room_id", "sender"},) ) search_filter = Filter(filter_dict) # TODO: Search through left rooms too rooms = yield self.store.get_rooms_for_user_where_membership_is( user.to_string(), membership_list=[Membership.JOIN], # membership_list=[Membership.JOIN, Membership.LEAVE, Membership.Ban], ) room_ids = set(r.room_id for r in rooms) room_ids = search_filter.filter_rooms(room_ids) if batch_group == "room_id": room_ids.intersection_update({batch_group_key}) if not room_ids: defer.returnValue({ "search_categories": { "room_events": { "results": [], "count": 0, "highlights": [], } } }) rank_map = {} # event_id -> rank of event allowed_events = [] room_groups = {} # Holds result of grouping by room, if applicable sender_group = {} # Holds result of grouping by sender, if applicable # Holds the next_batch for the entire result set if one of those exists global_next_batch = None highlights = set() count = None if order_by == "rank": search_result = yield self.store.search_msgs( room_ids, search_term, keys ) count = search_result["count"] if search_result["highlights"]: highlights.update(search_result["highlights"]) results = search_result["results"] results_map = {r["event"].event_id: r for r in results} rank_map.update({r["event"].event_id: r["rank"] for r in results}) filtered_events = search_filter.filter([r["event"] for r in results]) events = yield filter_events_for_client( self.store, user.to_string(), filtered_events ) events.sort(key=lambda e: -rank_map[e.event_id]) allowed_events = events[:search_filter.limit()] for e in allowed_events: rm = 
room_groups.setdefault(e.room_id, { "results": [], "order": rank_map[e.event_id], }) rm["results"].append(e.event_id) s = sender_group.setdefault(e.sender, { "results": [], "order": rank_map[e.event_id], }) s["results"].append(e.event_id) elif order_by == "recent": room_events = [] i = 0 pagination_token = batch_token # We keep looping and we keep filtering until we reach the limit # or we run out of things. # But only go around 5 times since otherwise synapse will be sad. while len(room_events) < search_filter.limit() and i < 5: i += 1 search_result = yield self.store.search_rooms( room_ids, search_term, keys, search_filter.limit() * 2, pagination_token=pagination_token, ) if search_result["highlights"]: highlights.update(search_result["highlights"]) count = search_result["count"] results = search_result["results"] results_map = {r["event"].event_id: r for r in results} rank_map.update({r["event"].event_id: r["rank"] for r in results}) filtered_events = search_filter.filter([ r["event"] for r in results ]) events = yield filter_events_for_client( self.store, user.to_string(), filtered_events ) room_events.extend(events) room_events = room_events[:search_filter.limit()] if len(results) < search_filter.limit() * 2: pagination_token = None break else: pagination_token = results[-1]["pagination_token"] for event in room_events: group = room_groups.setdefault(event.room_id, { "results": [], }) group["results"].append(event.event_id) if room_events and len(room_events) >= search_filter.limit(): last_event_id = room_events[-1].event_id pagination_token = results_map[last_event_id]["pagination_token"] # We want to respect the given batch group and group keys so # that if people blindly use the top level `next_batch` token # it returns more from the same group (if applicable) rather # than reverting to searching all results again. if batch_group and batch_group_key: global_next_batch = encode_base64("%s\n%s\n%s" % ( batch_group, batch_group_key, pagination_token )) else: global_next_batch = encode_base64("%s\n%s\n%s" % ( "all", "", pagination_token )) for room_id, group in room_groups.items(): group["next_batch"] = encode_base64("%s\n%s\n%s" % ( "room_id", room_id, pagination_token )) allowed_events.extend(room_events) else: # We should never get here due to the guard earlier. raise NotImplementedError() # If client has asked for "context" for each event (i.e. 
some surrounding # events and state), fetch that if event_context is not None: now_token = yield self.hs.get_event_sources().get_current_token() contexts = {} for event in allowed_events: res = yield self.store.get_events_around( event.room_id, event.event_id, before_limit, after_limit ) res["events_before"] = yield filter_events_for_client( self.store, user.to_string(), res["events_before"] ) res["events_after"] = yield filter_events_for_client( self.store, user.to_string(), res["events_after"] ) res["start"] = now_token.copy_and_replace( "room_key", res["start"] ).to_string() res["end"] = now_token.copy_and_replace( "room_key", res["end"] ).to_string() if include_profile: senders = set( ev.sender for ev in itertools.chain( res["events_before"], [event], res["events_after"] ) ) if res["events_after"]: last_event_id = res["events_after"][-1].event_id else: last_event_id = event.event_id state = yield self.store.get_state_for_event( last_event_id, types=[(EventTypes.Member, sender) for sender in senders] ) res["profile_info"] = { s.state_key: { "displayname": s.content.get("displayname", None), "avatar_url": s.content.get("avatar_url", None), } for s in state.values() if s.type == EventTypes.Member and s.state_key in senders } contexts[event.event_id] = res else: contexts = {} # TODO: Add a limit time_now = self.clock.time_msec() for context in contexts.values(): context["events_before"] = [ serialize_event(e, time_now) for e in context["events_before"] ] context["events_after"] = [ serialize_event(e, time_now) for e in context["events_after"] ] state_results = {} if include_state: rooms = set(e.room_id for e in allowed_events) for room_id in rooms: state = yield self.state_handler.get_current_state(room_id) state_results[room_id] = list(state.values()) state_results.values() # We're now about to serialize the events. We should not make any # blocking calls after this. Otherwise the 'age' will be wrong results = [ { "rank": rank_map[e.event_id], "result": serialize_event(e, time_now), "context": contexts.get(e.event_id, {}), } for e in allowed_events ] rooms_cat_res = { "results": results, "count": count, "highlights": list(highlights), } if state_results: rooms_cat_res["state"] = { room_id: [serialize_event(e, time_now) for e in state] for room_id, state in state_results.items() } if room_groups and "room_id" in group_keys: rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups if sender_group and "sender" in group_keys: rooms_cat_res.setdefault("groups", {})["sender"] = sender_group if global_next_batch: rooms_cat_res["next_batch"] = global_next_batch defer.returnValue({ "search_categories": { "room_events": rooms_cat_res } })
def serialize(self, ev, fields):
    return serialize_event(ev, 1479807801915, only_event_fields=fields)
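# Illustrative sketch (not Synapse code): the `only_event_fields` argument used
# above whitelists which keys of the serialized event are returned. A minimal
# standalone version of that idea, assuming dotted paths such as
# "content.body" address nested dictionary keys, could look like this.
def keep_only_fields(event_dict, fields):
    """Return a copy of event_dict containing only the whitelisted fields.

    `fields` is a list of top-level keys or dotted "parent.child" paths.
    """
    result = {}
    for field in fields:
        parts = field.split(".", 1)
        if len(parts) == 1:
            if parts[0] in event_dict:
                result[parts[0]] = event_dict[parts[0]]
        else:
            parent, child = parts
            sub = event_dict.get(parent, {})
            if isinstance(sub, dict) and child in sub:
                result.setdefault(parent, {})[child] = sub[child]
    return result


# Example usage of the sketch above:
#   keep_only_fields(
#       {"type": "m.room.message", "content": {"body": "hi", "url": "x"}},
#       ["type", "content.body"],
#   )
#   -> {"type": "m.room.message", "content": {"body": "hi"}}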
def search(self, user, content, batch=None): """Performs a full text search for a user. Args: user (UserID) content (dict): Search parameters batch (str): The next_batch parameter. Used for pagination. Returns: dict to be returned to the client with results of search """ batch_group = None batch_group_key = None batch_token = None if batch: try: b = decode_base64(batch) batch_group, batch_group_key, batch_token = b.split("\n") assert batch_group is not None assert batch_group_key is not None assert batch_token is not None except: raise SynapseError(400, "Invalid batch") try: room_cat = content["search_categories"]["room_events"] # The actual thing to query in FTS search_term = room_cat["search_term"] # Which "keys" to search over in FTS query keys = room_cat.get("keys", [ "content.body", "content.name", "content.topic", ]) # Filter to apply to results filter_dict = room_cat.get("filter", {}) # What to order results by (impacts whether pagination can be doen) order_by = room_cat.get("order_by", "rank") # Return the current state of the rooms? include_state = room_cat.get("include_state", False) # Include context around each event? event_context = room_cat.get( "event_context", None ) # Group results together? May allow clients to paginate within a # group group_by = room_cat.get("groupings", {}).get("group_by", {}) group_keys = [g["key"] for g in group_by] if event_context is not None: before_limit = int(event_context.get( "before_limit", 5 )) after_limit = int(event_context.get( "after_limit", 5 )) # Return the historic display name and avatar for the senders # of the events? include_profile = bool(event_context.get("include_profile", False)) except KeyError: raise SynapseError(400, "Invalid search query") if order_by not in ("rank", "recent"): raise SynapseError(400, "Invalid order by: %r" % (order_by,)) if set(group_keys) - {"room_id", "sender"}: raise SynapseError( 400, "Invalid group by keys: %r" % (set(group_keys) - {"room_id", "sender"},) ) search_filter = Filter(filter_dict) # TODO: Search through left rooms too rooms = yield self.store.get_rooms_for_user_where_membership_is( user.to_string(), membership_list=[Membership.JOIN], # membership_list=[Membership.JOIN, Membership.LEAVE, Membership.Ban], ) room_ids = set(r.room_id for r in rooms) room_ids = search_filter.filter_rooms(room_ids) if batch_group == "room_id": room_ids.intersection_update({batch_group_key}) rank_map = {} # event_id -> rank of event allowed_events = [] room_groups = {} # Holds result of grouping by room, if applicable sender_group = {} # Holds result of grouping by sender, if applicable # Holds the next_batch for the entire result set if one of those exists global_next_batch = None if order_by == "rank": results = yield self.store.search_msgs( room_ids, search_term, keys ) results_map = {r["event"].event_id: r for r in results} rank_map.update({r["event"].event_id: r["rank"] for r in results}) filtered_events = search_filter.filter([r["event"] for r in results]) events = yield self._filter_events_for_client( user.to_string(), filtered_events ) events.sort(key=lambda e: -rank_map[e.event_id]) allowed_events = events[:search_filter.limit()] for e in allowed_events: rm = room_groups.setdefault(e.room_id, { "results": [], "order": rank_map[e.event_id], }) rm["results"].append(e.event_id) s = sender_group.setdefault(e.sender, { "results": [], "order": rank_map[e.event_id], }) s["results"].append(e.event_id) elif order_by == "recent": # In this case we specifically loop through each room as the given # limit applies 
to each room, rather than a global list. # This is not necessarilly a good idea. for room_id in room_ids: room_events = [] if batch_group == "room_id" and batch_group_key == room_id: pagination_token = batch_token else: pagination_token = None i = 0 # We keep looping and we keep filtering until we reach the limit # or we run out of things. # But only go around 5 times since otherwise synapse will be sad. while len(room_events) < search_filter.limit() and i < 5: i += 1 results = yield self.store.search_room( room_id, search_term, keys, search_filter.limit() * 2, pagination_token=pagination_token, ) results_map = {r["event"].event_id: r for r in results} rank_map.update({r["event"].event_id: r["rank"] for r in results}) filtered_events = search_filter.filter([ r["event"] for r in results ]) events = yield self._filter_events_for_client( user.to_string(), filtered_events ) room_events.extend(events) room_events = room_events[:search_filter.limit()] if len(results) < search_filter.limit() * 2: pagination_token = None break else: pagination_token = results[-1]["pagination_token"] if room_events: res = results_map[room_events[-1].event_id] pagination_token = res["pagination_token"] group = room_groups.setdefault(room_id, {}) if pagination_token: next_batch = encode_base64("%s\n%s\n%s" % ( "room_id", room_id, pagination_token )) group["next_batch"] = next_batch if batch_token: global_next_batch = next_batch group["results"] = [e.event_id for e in room_events] group["order"] = max( e.origin_server_ts/1000 for e in room_events if hasattr(e, "origin_server_ts") ) allowed_events.extend(room_events) # Normalize the group orders if room_groups: if len(room_groups) > 1: mx = max(g["order"] for g in room_groups.values()) mn = min(g["order"] for g in room_groups.values()) for g in room_groups.values(): g["order"] = (g["order"] - mn) * 1.0 / (mx - mn) else: room_groups.values()[0]["order"] = 1 else: # We should never get here due to the guard earlier. raise NotImplementedError() # If client has asked for "context" for each event (i.e. 
some surrounding # events and state), fetch that if event_context is not None: now_token = yield self.hs.get_event_sources().get_current_token() contexts = {} for event in allowed_events: res = yield self.store.get_events_around( event.room_id, event.event_id, before_limit, after_limit ) res["events_before"] = yield self._filter_events_for_client( user.to_string(), res["events_before"] ) res["events_after"] = yield self._filter_events_for_client( user.to_string(), res["events_after"] ) res["start"] = now_token.copy_and_replace( "room_key", res["start"] ).to_string() res["end"] = now_token.copy_and_replace( "room_key", res["end"] ).to_string() if include_profile: senders = set( ev.sender for ev in itertools.chain( res["events_before"], [event], res["events_after"] ) ) if res["events_after"]: last_event_id = res["events_after"][-1].event_id else: last_event_id = event.event_id state = yield self.store.get_state_for_event( last_event_id, types=[(EventTypes.Member, sender) for sender in senders] ) res["profile_info"] = { s.state_key: { "displayname": s.content.get("displayname", None), "avatar_url": s.content.get("avatar_url", None), } for s in state.values() if s.type == EventTypes.Member and s.state_key in senders } contexts[event.event_id] = res else: contexts = {} # TODO: Add a limit time_now = self.clock.time_msec() for context in contexts.values(): context["events_before"] = [ serialize_event(e, time_now) for e in context["events_before"] ] context["events_after"] = [ serialize_event(e, time_now) for e in context["events_after"] ] state_results = {} if include_state: rooms = set(e.room_id for e in allowed_events) for room_id in rooms: state = yield self.state_handler.get_current_state(room_id) state_results[room_id] = state.values() state_results.values() # We're now about to serialize the events. We should not make any # blocking calls after this. Otherwise the 'age' will be wrong results = { e.event_id: { "rank": rank_map[e.event_id], "result": serialize_event(e, time_now), "context": contexts.get(e.event_id, {}), } for e in allowed_events } logger.info("Found %d results", len(results)) rooms_cat_res = { "results": results, "count": len(results) } if state_results: rooms_cat_res["state"] = { room_id: [serialize_event(e, time_now) for e in state] for room_id, state in state_results.items() } if room_groups and "room_id" in group_keys: rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups if sender_group and "sender" in group_keys: rooms_cat_res.setdefault("groups", {})["sender"] = sender_group if global_next_batch: rooms_cat_res["next_batch"] = global_next_batch defer.returnValue({ "search_categories": { "room_events": rooms_cat_res } })
def handle_room(event): d = { "room_id": event.room_id, "membership": event.membership, "visibility": ("public" if event.room_id in public_room_ids else "private"), } if event.membership == Membership.INVITE: time_now = self.clock.time_msec() d["inviter"] = event.sender invite_event = yield self.store.get_event(event.event_id) d["invite"] = serialize_event(invite_event, time_now, as_client_event) rooms_ret.append(d) if event.membership not in (Membership.JOIN, Membership.LEAVE): return try: if event.membership == Membership.JOIN: room_end_token = now_token.room_key deferred_room_state = self.state_handler.get_current_state( event.room_id) elif event.membership == Membership.LEAVE: room_end_token = "s%d" % (event.stream_ordering, ) deferred_room_state = self.store.get_state_for_events( [event.event_id], None) deferred_room_state.addCallback( lambda states: states[event.event_id]) (messages, token), current_state = yield make_deferred_yieldable( defer.gatherResults([ preserve_fn(self.store.get_recent_events_for_room)( event.room_id, limit=limit, end_token=room_end_token, ), deferred_room_state, ])).addErrback(unwrapFirstError) messages = yield filter_events_for_client( self.store, user_id, messages) start_token = now_token.copy_and_replace("room_key", token[0]) end_token = now_token.copy_and_replace("room_key", token[1]) time_now = self.clock.time_msec() d["messages"] = { "chunk": [ serialize_event(m, time_now, as_client_event) for m in messages ], "start": start_token.to_string(), "end": end_token.to_string(), } d["state"] = [ serialize_event(c, time_now, as_client_event) for c in current_state.values() ] account_data_events = [] tags = tags_by_room.get(event.room_id) if tags: account_data_events.append({ "type": "m.tag", "content": { "tags": tags }, }) account_data = account_data_by_room.get(event.room_id, {}) for account_data_type, content in account_data.items(): account_data_events.append({ "type": account_data_type, "content": content, }) d["account_data"] = account_data_events except Exception: logger.exception("Failed to get snapshot")
def handle_room(event): d = { "room_id": event.room_id, "membership": event.membership, "visibility": ( "public" if event.room_id in public_room_ids else "private" ), } if event.membership == Membership.INVITE: time_now = self.clock.time_msec() d["inviter"] = event.sender invite_event = yield self.store.get_event(event.event_id) d["invite"] = serialize_event(invite_event, time_now, as_client_event) rooms_ret.append(d) if event.membership not in (Membership.JOIN, Membership.LEAVE): return try: if event.membership == Membership.JOIN: room_end_token = now_token.room_key deferred_room_state = self.state_handler.get_current_state( event.room_id ) elif event.membership == Membership.LEAVE: room_end_token = "s%d" % (event.stream_ordering,) deferred_room_state = self.store.get_state_for_events( [event.event_id], None ) deferred_room_state.addCallback( lambda states: states[event.event_id] ) (messages, token), current_state = yield defer.gatherResults( [ self.store.get_recent_events_for_room( event.room_id, limit=limit, end_token=room_end_token, ), deferred_room_state, ] ).addErrback(unwrapFirstError) messages = yield self._filter_events_for_client( user_id, messages ) start_token = now_token.copy_and_replace("room_key", token[0]) end_token = now_token.copy_and_replace("room_key", token[1]) time_now = self.clock.time_msec() d["messages"] = { "chunk": [ serialize_event(m, time_now, as_client_event) for m in messages ], "start": start_token.to_string(), "end": end_token.to_string(), } d["state"] = [ serialize_event(c, time_now, as_client_event) for c in current_state.values() ] account_data_events = [] tags = tags_by_room.get(event.room_id) if tags: account_data_events.append({ "type": "m.tag", "content": {"tags": tags}, }) account_data = account_data_by_room.get(event.room_id, {}) for account_data_type, content in account_data.items(): account_data_events.append({ "type": account_data_type, "content": content, }) d["account_data"] = account_data_events except: logger.exception("Failed to get snapshot")
def get_stream(self, auth_user_id, pagin_config, timeout=0,
               as_client_event=True, affect_presence=True,
               only_keys=None, room_id=None, is_guest=False):
    """Fetches the events stream for a given user.

    If `only_keys` is not None, only events from those stream keys will be
    sent down.
    """
    auth_user = UserID.from_string(auth_user_id)
    presence_handler = self.hs.get_presence_handler()

    context = yield presence_handler.user_syncing(
        auth_user_id, affect_presence=affect_presence,
    )
    with context:
        if timeout:
            # If they've set a timeout set a minimum limit.
            timeout = max(timeout, 500)

            # Add some randomness to this value to try and mitigate against
            # thundering herds on restart.
            timeout = random.randint(int(timeout * 0.9), int(timeout * 1.1))

        events, tokens = yield self.notifier.get_events_for(
            auth_user, pagin_config, timeout,
            only_keys=only_keys,
            is_guest=is_guest, explicit_room_id=room_id
        )

        # When the user joins a new room, or another user joins a currently
        # joined room, we need to send down presence for those users.
        to_add = []
        for event in events:
            if not isinstance(event, EventBase):
                continue
            if event.type == EventTypes.Member:
                if event.membership != Membership.JOIN:
                    continue
                # Send down presence.
                if event.state_key == auth_user_id:
                    # Send down presence for everyone in the room.
                    users = yield self.state.get_current_user_in_room(
                        event.room_id
                    )
                    states = yield presence_handler.get_states(
                        users,
                        as_event=True,
                    )
                    to_add.extend(states)
                else:
                    ev = yield presence_handler.get_state(
                        UserID.from_string(event.state_key),
                        as_event=True,
                    )
                    to_add.append(ev)

        events.extend(to_add)

        time_now = self.clock.time_msec()

        chunks = [
            serialize_event(e, time_now, as_client_event) for e in events
        ]

        chunk = {
            "chunk": chunks,
            "start": tokens[0].to_string(),
            "end": tokens[1].to_string(),
        }

        defer.returnValue(chunk)
def get_messages(self, requester, room_id=None, pagin_config=None,
                 as_client_event=True, event_filter=None):
    """Get messages in a room.

    Args:
        requester (Requester): The user requesting messages.
        room_id (str): The room they want messages from.
        pagin_config (synapse.api.streams.PaginationConfig): The pagination
            config rules to apply, if any.
        as_client_event (bool): True to get events in client-server format.
        event_filter (Filter): Filter to apply to results or None
    Returns:
        dict: Pagination API results
    """
    user_id = requester.user.to_string()

    if pagin_config.from_token:
        room_token = pagin_config.from_token.room_key
    else:
        pagin_config.from_token = (
            yield self.hs.get_event_sources().get_current_token_for_room(
                room_id=room_id
            )
        )
        room_token = pagin_config.from_token.room_key

    room_token = RoomStreamToken.parse(room_token)

    pagin_config.from_token = pagin_config.from_token.copy_and_replace(
        "room_key", str(room_token)
    )

    source_config = pagin_config.get_source_config("room")

    with (yield self.pagination_lock.read(room_id)):
        membership, member_event_id = yield self.auth.check_in_room_or_world_readable(
            room_id, user_id
        )

        if source_config.direction == 'b':
            # if we're going backwards, we might need to backfill. This
            # requires that we have a topo token.
            if room_token.topological:
                max_topo = room_token.topological
            else:
                max_topo = yield self.store.get_max_topological_token(
                    room_id, room_token.stream
                )

            if membership == Membership.LEAVE:
                # If they have left the room then clamp the token to be before
                # they left the room, to save the effort of loading from the
                # database.
                leave_token = yield self.store.get_topological_token_for_event(
                    member_event_id
                )
                leave_token = RoomStreamToken.parse(leave_token)
                if leave_token.topological < max_topo:
                    source_config.from_key = str(leave_token)

            yield self.hs.get_handlers().federation_handler.maybe_backfill(
                room_id, max_topo
            )

        events, next_key = yield self.store.paginate_room_events(
            room_id=room_id,
            from_key=source_config.from_key,
            to_key=source_config.to_key,
            direction=source_config.direction,
            limit=source_config.limit,
            event_filter=event_filter,
        )

        next_token = pagin_config.from_token.copy_and_replace(
            "room_key", next_key
        )

    if events:
        if event_filter:
            events = event_filter.filter(events)

        events = yield filter_events_for_client(
            self.store,
            user_id,
            events,
            is_peeking=(member_event_id is None),
        )

    if not events:
        defer.returnValue({
            "chunk": [],
            "start": pagin_config.from_token.to_string(),
            "end": next_token.to_string(),
        })

    state = None
    if event_filter and event_filter.lazy_load_members():
        # TODO: remove redundant members

        # FIXME: we also care about invite targets etc.
        state_filter = StateFilter.from_types(
            (EventTypes.Member, event.sender) for event in events
        )

        state_ids = yield self.store.get_state_ids_for_event(
            events[0].event_id, state_filter=state_filter,
        )

        if state_ids:
            state = yield self.store.get_events(list(state_ids.values()))
            state = state.values()

    time_now = self.clock.time_msec()

    chunk = {
        "chunk": [
            serialize_event(e, time_now, as_client_event)
            for e in events
        ],
        "start": pagin_config.from_token.to_string(),
        "end": next_token.to_string(),
    }

    if state:
        chunk["state"] = [
            serialize_event(e, time_now, as_client_event)
            for e in state
        ]

    defer.returnValue(chunk)
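The "start" and "end" tokens built above are what clients chain between successive /messages requests. As a rough illustration only, here is a minimal client-side sketch of that loop against the standard Matrix client-server endpoint; the homeserver URL, access token, and room ID are placeholders, not values taken from the handler.

import requests

# Hypothetical placeholders for illustration.
HOMESERVER = "https://example.org"
ACCESS_TOKEN = "MDAx..."
ROOM_ID = "!room:example.org"


def fetch_backwards(limit=10, max_pages=5):
    """Page backwards through a room's history by feeding each response's
    'end' token into the next request's 'from' parameter."""
    params = {"dir": "b", "limit": limit, "access_token": ACCESS_TOKEN}
    for _ in range(max_pages):
        # When 'from' is omitted, the handler above falls back to the
        # room's current token.
        resp = requests.get(
            "%s/_matrix/client/r0/rooms/%s/messages" % (HOMESERVER, ROOM_ID),
            params=params,
        ).json()
        chunk = resp.get("chunk", [])
        if not chunk:
            # The handler returns an empty chunk when nothing older remains.
            break
        for event in chunk:
            print(event.get("type"), event.get("event_id"))
        # This page's 'end' becomes the next page's 'from'.
        params["from"] = resp["end"]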
def get_state_events(
    self, user_id, room_id, types=None, filtered_types=None,
    at_token=None, is_guest=False,
):
    """Retrieve all state events for a given room. If the user is
    joined to the room then return the current state. If the user has
    left the room return the state events from when they left. If an explicit
    'at' parameter is passed, return the state events as of that event, if
    visible.

    Args:
        user_id(str): The user requesting state events.
        room_id(str): The room ID to get all state events from.
        types(list[(str, str|None)]|None): List of (type, state_key) tuples
            which are used to filter the state fetched. If `state_key` is None,
            all events of the given type are returned. May be None, which
            matches any key.
        filtered_types(list[str]|None): Only apply filtering via `types` to this
            list of event types. Other types of events are returned unfiltered.
            If None, `types` filtering is applied to all events.
        at_token(StreamToken|None): the stream token at which we are requesting
            the state. If the user is not allowed to view the state as of that
            stream token, we raise a 403 SynapseError. If None, returns the
            current state based on the current_state_events table.
        is_guest(bool): whether this user is a guest
    Returns:
        A list of dicts representing state events. [{}, {}, {}]
    Raises:
        NotFoundError (404) if the at token does not yield an event

        AuthError (403) if the user doesn't have permission to view
        members of this room.
    """
    if at_token:
        # FIXME this claims to get the state at a stream position, but
        # get_recent_events_for_room operates by topo ordering. This therefore
        # does not reliably give you the state at the given stream position.
        # (https://github.com/matrix-org/synapse/issues/3305)
        last_events, _ = yield self.store.get_recent_events_for_room(
            room_id, end_token=at_token.room_key, limit=1,
        )

        if not last_events:
            raise NotFoundError("Can't find event for token %s" % (at_token, ))

        visible_events = yield filter_events_for_client(
            self.store, user_id, last_events,
        )

        event = last_events[0]
        if visible_events:
            room_state = yield self.store.get_state_for_events(
                [event.event_id], types, filtered_types=filtered_types,
            )
            room_state = room_state[event.event_id]
        else:
            raise AuthError(
                403,
                "User %s not allowed to view events in room %s at token %s" % (
                    user_id, room_id, at_token,
                )
            )
    else:
        membership, membership_event_id = (
            yield self.auth.check_in_room_or_world_readable(
                room_id, user_id,
            )
        )

        if membership == Membership.JOIN:
            state_ids = yield self.store.get_filtered_current_state_ids(
                room_id, types, filtered_types=filtered_types,
            )
            room_state = yield self.store.get_events(state_ids.values())
        elif membership == Membership.LEAVE:
            room_state = yield self.store.get_state_for_events(
                [membership_event_id], types, filtered_types=filtered_types,
            )
            room_state = room_state[membership_event_id]

    now = self.clock.time_msec()
    defer.returnValue(
        [serialize_event(c, now) for c in room_state.values()]
    )
def room_initial_sync(self, user_id, room_id, pagin_config=None,
                      feedback=False):
    current_state = yield self.state.get_current_state(
        room_id=room_id,
    )

    yield self.auth.check_joined_room(
        room_id, user_id,
        current_state=current_state
    )

    # TODO(paul): I wish I was called with user objects not user_id
    #   strings...
    auth_user = UserID.from_string(user_id)

    # TODO: These concurrently
    time_now = self.clock.time_msec()
    state = [
        serialize_event(x, time_now)
        for x in current_state.values()
    ]

    member_event = current_state.get((EventTypes.Member, user_id,))

    now_token = yield self.hs.get_event_sources().get_current_token()

    limit = pagin_config.limit if pagin_config else None
    if limit is None:
        limit = 10

    room_members = [
        m for m in current_state.values()
        if m.type == EventTypes.Member
        and m.content["membership"] == Membership.JOIN
    ]

    presence_handler = self.hs.get_handlers().presence_handler

    @defer.inlineCallbacks
    def get_presence():
        states = yield presence_handler.get_states(
            target_users=[
                UserID.from_string(m.user_id) for m in room_members
            ],
            auth_user=auth_user,
            as_event=True,
            check_auth=False,
        )

        defer.returnValue(states.values())

    receipts_handler = self.hs.get_handlers().receipts_handler

    presence, receipts, (messages, token) = yield defer.gatherResults(
        [
            get_presence(),
            receipts_handler.get_receipts_for_room(
                room_id, now_token.receipt_key
            ),
            self.store.get_recent_events_for_room(
                room_id,
                limit=limit,
                end_token=now_token.room_key,
            )
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError)

    messages = yield self._filter_events_for_client(
        user_id, room_id, messages
    )

    start_token = now_token.copy_and_replace("room_key", token[0])
    end_token = now_token.copy_and_replace("room_key", token[1])

    time_now = self.clock.time_msec()

    defer.returnValue({
        "membership": member_event.membership,
        "room_id": room_id,
        "messages": {
            "chunk": [serialize_event(m, time_now) for m in messages],
            "start": start_token.to_string(),
            "end": end_token.to_string(),
        },
        "state": state,
        "presence": presence,
        "receipts": receipts,
    })
def get_state_events(
    self, user_id, room_id, state_filter=StateFilter.all(),
    at_token=None, is_guest=False,
):
    """Retrieve all state events for a given room. If the user is
    joined to the room then return the current state. If the user has
    left the room return the state events from when they left. If an explicit
    'at' parameter is passed, return the state events as of that event, if
    visible.

    Args:
        user_id(str): The user requesting state events.
        room_id(str): The room ID to get all state events from.
        state_filter (StateFilter): The state filter used to fetch state
            from the database.
        at_token(StreamToken|None): the stream token at which we are requesting
            the state. If the user is not allowed to view the state as of that
            stream token, we raise a 403 SynapseError. If None, returns the
            current state based on the current_state_events table.
        is_guest(bool): whether this user is a guest
    Returns:
        A list of dicts representing state events. [{}, {}, {}]
    Raises:
        NotFoundError (404) if the at token does not yield an event

        AuthError (403) if the user doesn't have permission to view
        members of this room.
    """
    if at_token:
        # FIXME this claims to get the state at a stream position, but
        # get_recent_events_for_room operates by topo ordering. This therefore
        # does not reliably give you the state at the given stream position.
        # (https://github.com/matrix-org/synapse/issues/3305)
        last_events, _ = yield self.store.get_recent_events_for_room(
            room_id, end_token=at_token.room_key, limit=1,
        )

        if not last_events:
            raise NotFoundError("Can't find event for token %s" % (at_token, ))

        visible_events = yield filter_events_for_client(
            self.store, user_id, last_events,
        )

        event = last_events[0]
        if visible_events:
            room_state = yield self.store.get_state_for_events(
                [event.event_id], state_filter=state_filter,
            )
            room_state = room_state[event.event_id]
        else:
            raise AuthError(
                403,
                "User %s not allowed to view events in room %s at token %s" % (
                    user_id, room_id, at_token,
                )
            )
    else:
        membership, membership_event_id = (
            yield self.auth.check_in_room_or_world_readable(
                room_id, user_id,
            )
        )

        if membership == Membership.JOIN:
            state_ids = yield self.store.get_filtered_current_state_ids(
                room_id, state_filter=state_filter,
            )
            room_state = yield self.store.get_events(state_ids.values())
        elif membership == Membership.LEAVE:
            room_state = yield self.store.get_state_for_events(
                [membership_event_id], state_filter=state_filter,
            )
            room_state = room_state[membership_event_id]

    now = self.clock.time_msec()
    defer.returnValue(
        [serialize_event(c, now) for c in room_state.values()]
    )
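For orientation, a minimal sketch of how a caller might build the state_filter argument, using only the two StateFilter constructors that appear in this code (StateFilter.all and StateFilter.from_types). The import path is assumed, and it is assumed from_types accepts any iterable of (type, state_key) pairs, as the generator usage in get_messages suggests.

from synapse.api.constants import EventTypes
from synapse.storage.state import StateFilter  # import path assumed

# Match every piece of room state (the default in the signature above).
everything = StateFilter.all()

# Match only the m.room.member events for specific users, mirroring the
# lazy-loading pattern used by get_messages above.
members_only = StateFilter.from_types([
    (EventTypes.Member, "@alice:example.org"),
    (EventTypes.Member, "@bob:example.org"),
])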
def get_stream(self, auth_user_id, pagin_config, timeout=0,
               as_client_event=True, affect_presence=True,
               only_room_events=False):
    """Fetches the events stream for a given user.

    If `only_room_events` is `True` only room events will be returned.
    """
    auth_user = UserID.from_string(auth_user_id)

    try:
        if affect_presence:
            if auth_user not in self._streams_per_user:
                self._streams_per_user[auth_user] = 0
                if auth_user in self._stop_timer_per_user:
                    try:
                        self.clock.cancel_call_later(
                            self._stop_timer_per_user.pop(auth_user)
                        )
                    except Exception:
                        logger.exception("Failed to cancel event timer")
                else:
                    yield self.distributor.fire(
                        "started_user_eventstream", auth_user
                    )
            self._streams_per_user[auth_user] += 1

        rm_handler = self.hs.get_handlers().room_member_handler

        app_service = yield self.store.get_app_service_by_user_id(
            auth_user.to_string()
        )
        if app_service:
            rooms = yield self.store.get_app_service_rooms(app_service)
            room_ids = set(r.room_id for r in rooms)
        else:
            room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user)

        if timeout:
            # If they've set a timeout set a minimum limit.
            timeout = max(timeout, 500)

            # Add some randomness to this value to try and mitigate against
            # thundering herds on restart.
            timeout = random.randint(int(timeout * 0.9), int(timeout * 1.1))

        events, tokens = yield self.notifier.get_events_for(
            auth_user, room_ids, pagin_config, timeout,
            only_room_events=only_room_events
        )

        time_now = self.clock.time_msec()

        chunks = [
            serialize_event(e, time_now, as_client_event) for e in events
        ]

        chunk = {
            "chunk": chunks,
            "start": tokens[0].to_string(),
            "end": tokens[1].to_string(),
        }

        defer.returnValue(chunk)

    finally:
        if affect_presence:
            self._streams_per_user[auth_user] -= 1
            if not self._streams_per_user[auth_user]:
                del self._streams_per_user[auth_user]

                # 30 seconds of grace to allow the client to reconnect again
                # before we think they're gone
                def _later():
                    logger.debug(
                        "_later stopped_user_eventstream %s", auth_user
                    )

                    self._stop_timer_per_user.pop(auth_user, None)

                    return self.distributor.fire(
                        "stopped_user_eventstream", auth_user
                    )

                logger.debug("Scheduling _later: for %s", auth_user)
                self._stop_timer_per_user[auth_user] = (
                    self.clock.call_later(30, _later)
                )
def get_messages(self, requester, room_id=None, pagin_config=None,
                 as_client_event=True, event_filter=None):
    """Get messages in a room.

    Args:
        requester (Requester): The user requesting messages.
        room_id (str): The room they want messages from.
        pagin_config (synapse.api.streams.PaginationConfig): The pagination
            config rules to apply, if any.
        as_client_event (bool): True to get events in client-server format.
        event_filter (Filter): Filter to apply to results or None
    Returns:
        dict: Pagination API results
    """
    user_id = requester.user.to_string()

    if pagin_config.from_token:
        room_token = pagin_config.from_token.room_key
    else:
        pagin_config.from_token = (
            yield self.hs.get_event_sources().get_current_token_for_room(
                room_id=room_id
            )
        )
        room_token = pagin_config.from_token.room_key

    room_token = RoomStreamToken.parse(room_token)

    pagin_config.from_token = pagin_config.from_token.copy_and_replace(
        "room_key", str(room_token)
    )

    source_config = pagin_config.get_source_config("room")

    with (yield self.pagination_lock.read(room_id)):
        membership, member_event_id = yield self._check_in_room_or_world_readable(
            room_id, user_id
        )

        if source_config.direction == 'b':
            # if we're going backwards, we might need to backfill. This
            # requires that we have a topo token.
            if room_token.topological:
                max_topo = room_token.topological
            else:
                max_topo = yield self.store.get_max_topological_token(
                    room_id, room_token.stream
                )

            if membership == Membership.LEAVE:
                # If they have left the room then clamp the token to be before
                # they left the room, to save the effort of loading from the
                # database.
                leave_token = yield self.store.get_topological_token_for_event(
                    member_event_id
                )
                leave_token = RoomStreamToken.parse(leave_token)
                if leave_token.topological < max_topo:
                    source_config.from_key = str(leave_token)

            yield self.hs.get_handlers().federation_handler.maybe_backfill(
                room_id, max_topo
            )

        events, next_key = yield self.store.paginate_room_events(
            room_id=room_id,
            from_key=source_config.from_key,
            to_key=source_config.to_key,
            direction=source_config.direction,
            limit=source_config.limit,
            event_filter=event_filter,
        )

        next_token = pagin_config.from_token.copy_and_replace(
            "room_key", next_key
        )

    if not events:
        defer.returnValue({
            "chunk": [],
            "start": pagin_config.from_token.to_string(),
            "end": next_token.to_string(),
        })

    if event_filter:
        events = event_filter.filter(events)

    events = yield filter_events_for_client(
        self.store,
        user_id,
        events,
        is_peeking=(member_event_id is None),
    )

    time_now = self.clock.time_msec()

    chunk = {
        "chunk": [
            serialize_event(e, time_now, as_client_event)
            for e in events
        ],
        "start": pagin_config.from_token.to_string(),
        "end": next_token.to_string(),
    }

    defer.returnValue(chunk)
def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
                              membership, is_peeking):
    current_state = yield self.state.get_current_state(
        room_id=room_id,
    )

    # TODO: These concurrently
    time_now = self.clock.time_msec()
    state = [
        serialize_event(x, time_now)
        for x in current_state.values()
    ]

    now_token = yield self.hs.get_event_sources().get_current_token()

    limit = pagin_config.limit if pagin_config else None
    if limit is None:
        limit = 10

    room_members = [
        m for m in current_state.values()
        if m.type == EventTypes.Member
        and m.content["membership"] == Membership.JOIN
    ]

    presence_handler = self.hs.get_presence_handler()

    @defer.inlineCallbacks
    def get_presence():
        states = yield presence_handler.get_states(
            [m.user_id for m in room_members],
            as_event=True,
        )

        defer.returnValue(states)

    @defer.inlineCallbacks
    def get_receipts():
        receipts = yield self.store.get_linearized_receipts_for_room(
            room_id,
            to_key=now_token.receipt_key,
        )
        if not receipts:
            receipts = []
        defer.returnValue(receipts)

    presence, receipts, (messages, token) = yield defer.gatherResults(
        [
            preserve_fn(get_presence)(),
            preserve_fn(get_receipts)(),
            preserve_fn(self.store.get_recent_events_for_room)(
                room_id,
                limit=limit,
                end_token=now_token.room_key,
            )
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError)

    messages = yield filter_events_for_client(
        self.store, user_id, messages, is_peeking=is_peeking,
    )

    start_token = now_token.copy_and_replace("room_key", token[0])
    end_token = now_token.copy_and_replace("room_key", token[1])

    time_now = self.clock.time_msec()

    ret = {
        "room_id": room_id,
        "messages": {
            "chunk": [serialize_event(m, time_now) for m in messages],
            "start": start_token.to_string(),
            "end": end_token.to_string(),
        },
        "state": state,
        "presence": presence,
        "receipts": receipts,
    }
    if not is_peeking:
        ret["membership"] = membership

    defer.returnValue(ret)
def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
                              membership, is_peeking):
    current_state = yield self.state.get_current_state(
        room_id=room_id,
    )

    # TODO: These concurrently
    time_now = self.clock.time_msec()
    state = [
        serialize_event(x, time_now)
        for x in current_state.values()
    ]

    now_token = yield self.hs.get_event_sources().get_current_token()

    limit = pagin_config.limit if pagin_config else None
    if limit is None:
        limit = 10

    room_members = [
        m for m in current_state.values()
        if m.type == EventTypes.Member
        and m.content["membership"] == Membership.JOIN
    ]

    presence_handler = self.hs.get_handlers().presence_handler

    @defer.inlineCallbacks
    def get_presence():
        states = yield presence_handler.get_states(
            [m.user_id for m in room_members],
            as_event=True,
        )

        defer.returnValue(states)

    @defer.inlineCallbacks
    def get_receipts():
        receipts_handler = self.hs.get_handlers().receipts_handler
        receipts = yield receipts_handler.get_receipts_for_room(
            room_id,
            now_token.receipt_key
        )
        defer.returnValue(receipts)

    presence, receipts, (messages, token) = yield defer.gatherResults(
        [
            get_presence(),
            get_receipts(),
            self.store.get_recent_events_for_room(
                room_id,
                limit=limit,
                end_token=now_token.room_key,
            )
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError)

    messages = yield self._filter_events_for_client(
        user_id, messages, is_peeking=is_peeking,
    )

    start_token = now_token.copy_and_replace("room_key", token[0])
    end_token = now_token.copy_and_replace("room_key", token[1])

    time_now = self.clock.time_msec()

    ret = {
        "room_id": room_id,
        "messages": {
            "chunk": [serialize_event(m, time_now) for m in messages],
            "start": start_token.to_string(),
            "end": end_token.to_string(),
        },
        "state": state,
        "presence": presence,
        "receipts": receipts,
    }
    if not is_peeking:
        ret["membership"] = membership

    defer.returnValue(ret)