async def on_GET(self, request: SynapseRequest, room_identifier: str) -> Tuple[int, JsonDict]:
    """Return the summary for a room, resolving an alias to its ID first.

    Authentication is optional: unauthenticated callers are handled with a
    ``None`` requester user ID.
    """
    try:
        requester = await self._auth.get_user_by_req(request, allow_guest=True)
        requester_user_id: Optional[str] = requester.user.to_string()
    except MissingClientTokenError:
        # auth is optional
        requester_user_id = None

    # twisted.web.server.Request.args is incorrectly defined as Optional[Any]
    args: Dict[bytes, List[bytes]] = request.args  # type: ignore

    via = parse_strings_from_args(args, "via", required=False)
    room_id, via = await self.resolve_room_id(room_identifier, via)

    summary = await self._room_summary_handler.get_room_summary(
        requester_user_id,
        room_id,
        via,
    )
    return 200, summary
async def on_GET(
    self,
    origin: str,
    content: Literal[None],
    query: Mapping[bytes, Sequence[bytes]],
    room_id: str,
) -> Tuple[int, JsonDict]:
    """Federation endpoint returning the space summary of ``room_id``.

    Query parameters:
        suggested_only: if true, only suggested rooms are included.
        max_rooms_per_space: optional cap on children per space.
        exclude_rooms: room IDs to omit from the response.

    Raises:
        SynapseError: 400 if ``max_rooms_per_space`` is negative.
    """
    suggested_only = parse_boolean_from_args(query, "suggested_only", default=False)
    exclude_rooms = parse_strings_from_args(query, "exclude_rooms", default=[])

    max_rooms_per_space = parse_integer_from_args(query, "max_rooms_per_space")
    if max_rooms_per_space is not None and max_rooms_per_space < 0:
        raise SynapseError(
            400,
            "Value for 'max_rooms_per_space' must be a non-negative integer",
            Codes.BAD_JSON,
        )

    summary = await self.handler.federation_space_summary(
        origin, room_id, suggested_only, max_rooms_per_space, exclude_rooms
    )
    return 200, summary
async def on_POST(
    self,
    request: SynapseRequest,
    room_identifier: str,
    txn_id: Optional[str] = None,
):
    """Join the requesting user to the room given by ID or alias."""
    requester = await self.auth.get_user_by_req(request, allow_guest=True)

    try:
        content = parse_json_object_from_request(request)
    except Exception:
        # Turns out we used to ignore the body entirely, and some clients
        # cheekily send invalid bodies.
        content = {}

    # twisted.web.server.Request.args is incorrectly defined as Optional[Any]
    args: Dict[bytes, List[bytes]] = request.args  # type: ignore

    hosts = parse_strings_from_args(args, "server_name", required=False)
    room_id, hosts = await self.resolve_room_id(room_identifier, hosts)

    await self.room_member_handler.update_membership(
        requester=requester,
        target=requester.user,
        room_id=room_id,
        action="join",
        txn_id=txn_id,
        remote_room_hosts=hosts,
        content=content,
        third_party_signed=content.get("third_party_signed", None),
    )

    return 200, {"room_id": room_id}
async def on_GET(
    self,
    origin: str,
    content: Literal[None],
    query: Dict[bytes, List[bytes]],
    room_id: str,
    user_id: str,
) -> Tuple[int, JsonDict]:
    """Handle a federation make_join request.

    Args:
        origin: The authenticated server_name of the calling server
        content: (GETs don't have bodies)
        query: Query params from the request.
        room_id: Room path component from the matched route.
        user_id: User path component from the matched route.

    Returns:
        Tuple of (response code, response object)
    """
    # Room versions the calling server supports. A server that predates the
    # "ver" parameter is assumed to only support room version 1.
    versions = parse_strings_from_args(query, "ver", encoding="utf-8")
    if versions is None:
        versions = ["1"]

    result = await self.handler.on_make_join_request(
        origin, room_id, user_id, supported_versions=versions
    )
    return 200, result
async def on_GET(
    self,
    origin: str,
    content: Literal[None],
    query: Dict[bytes, List[bytes]],
    room_id: str,
    user_id: str,
) -> Tuple[int, JsonDict]:
    """Handle a federation make_knock request for ``user_id`` in ``room_id``."""
    # Retrieve the room versions the remote homeserver claims to support
    versions = parse_strings_from_args(
        query, "ver", required=True, encoding="utf-8"
    )

    result = await self.handler.on_make_knock_request(
        origin, room_id, user_id, supported_versions=versions
    )
    return 200, result
async def on_POST(
    self,
    request: SynapseRequest,
    room_identifier: str,
    txn_id: Optional[str] = None,
) -> Tuple[int, JsonDict]:
    """Knock on a room, identified by either a room ID or an alias."""
    requester = await self.auth.get_user_by_req(request)
    content = parse_json_object_from_request(request)

    # Only an optional "reason" from the body is forwarded into the event.
    event_content = None
    if "reason" in content:
        event_content = {"reason": content["reason"]}

    if RoomID.is_valid(room_identifier):
        room_id = room_identifier
        # twisted.web.server.Request.args is incorrectly defined as Optional[Any]
        args: Dict[bytes, List[bytes]] = request.args  # type: ignore
        remote_room_hosts = parse_strings_from_args(
            args, "server_name", required=False
        )
    elif RoomAlias.is_valid(room_identifier):
        room_alias = RoomAlias.from_string(room_identifier)
        (
            room_id_obj,
            remote_room_hosts,
        ) = await self.room_member_handler.lookup_room_alias(room_alias)
        room_id = room_id_obj.to_string()
    else:
        raise SynapseError(
            400, "%s was not legal room ID or room alias" % (room_identifier, )
        )

    await self.room_member_handler.update_membership(
        requester=requester,
        target=requester.user,
        room_id=room_id,
        action=Membership.KNOCK,
        txn_id=txn_id,
        third_party_signed=None,
        remote_room_hosts=remote_room_hosts,
        content=event_content,
    )

    return 200, {"room_id": room_id}
async def on_POST(self, request: SynapseRequest, room_id: str) -> Tuple[int, JsonDict]:
    """Insert a batch of historical events into a room (MSC2716 batch-send).

    Only callable by application services. Expects a JSON body containing
    "state_events_at_start" and "events", a required "prev_event_id" query
    parameter and an optional "batch_id" query parameter.

    Returns:
        200 and a dict with the IDs of the persisted state, insertion,
        batch and historical events.

    Raises:
        AuthError: if the requester is not an application service.
        SynapseError: on missing/invalid query parameters.
    """
    requester = await self.auth.get_user_by_req(request, allow_guest=False)

    if not requester.app_service:
        raise AuthError(
            HTTPStatus.FORBIDDEN,
            "Only application services can use the /batchsend endpoint",
        )

    body = parse_json_object_from_request(request)
    assert_params_in_dict(body, ["state_events_at_start", "events"])

    assert request.args is not None
    prev_event_ids_from_query = parse_strings_from_args(
        request.args, "prev_event_id")
    batch_id_from_query = parse_string(request, "batch_id")

    if prev_event_ids_from_query is None:
        raise SynapseError(
            HTTPStatus.BAD_REQUEST,
            "prev_event query parameter is required when inserting historical messages back in time",
            errcode=Codes.MISSING_PARAM,
        )

    # Verify the batch_id_from_query corresponds to an actual insertion event
    # and have the batch connected.
    if batch_id_from_query:
        corresponding_insertion_event_id = (
            await self.store.get_insertion_event_by_batch_id(
                room_id, batch_id_from_query))
        if corresponding_insertion_event_id is None:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "No insertion event corresponds to the given ?batch_id",
                errcode=Codes.INVALID_PARAM,
            )

    # For the event we are inserting next to (`prev_event_ids_from_query`),
    # find the most recent auth events (derived from state events) that
    # allowed that message to be sent. We will use that as a base
    # to auth our historical messages against.
    auth_event_ids = await self.room_batch_handler.get_most_recent_auth_event_ids_from_event_id_list(
        prev_event_ids_from_query)

    # Create and persist all of the state events that float off on their own
    # before the batch. These will most likely be all of the invite/member
    # state events used to auth the upcoming historical messages.
    state_event_ids_at_start = (
        await self.room_batch_handler.persist_state_events_at_start(
            state_events_at_start=body["state_events_at_start"],
            room_id=room_id,
            initial_auth_event_ids=auth_event_ids,
            app_service_requester=requester,
        ))
    # Update our ongoing auth event ID list with all of the new state we
    # just created
    auth_event_ids.extend(state_event_ids_at_start)

    inherited_depth = await self.room_batch_handler.inherit_depth_from_prev_ids(
        prev_event_ids_from_query)

    events_to_create = body["events"]

    # Figure out which batch to connect to. If they passed in
    # batch_id_from_query let's use it. The batch ID passed in comes
    # from the batch_id in the "insertion" event from the previous batch.
    last_event_in_batch = events_to_create[-1]
    base_insertion_event = None
    if batch_id_from_query:
        batch_id_to_connect_to = batch_id_from_query
        # All but the first base insertion event should point at a fake
        # event, which causes the HS to ask for the state at the start of
        # the batch later.
        fake_prev_event_id = "$" + random_string(43)
        prev_event_ids = [fake_prev_event_id]
    # Otherwise, create an insertion event to act as a starting point.
    #
    # We don't always have an insertion event to start hanging more history
    # off of (ideally there would be one in the main DAG, but that's not the
    # case if we're wanting to add history to e.g. existing rooms without
    # an insertion event), in which case we just create a new insertion event
    # that can then get pointed to by a "marker" event later.
    else:
        prev_event_ids = prev_event_ids_from_query

        base_insertion_event_dict = (
            self.room_batch_handler.create_insertion_event_dict(
                sender=requester.user.to_string(),
                room_id=room_id,
                origin_server_ts=last_event_in_batch["origin_server_ts"],
            ))
        base_insertion_event_dict["prev_events"] = prev_event_ids.copy()

        (
            base_insertion_event,
            _,
        ) = await self.event_creation_handler.create_and_send_nonmember_event(
            await self.room_batch_handler.create_requester_for_user_id_from_app_service(
                base_insertion_event_dict["sender"],
                requester.app_service,
            ),
            base_insertion_event_dict,
            prev_event_ids=base_insertion_event_dict.get("prev_events"),
            auth_event_ids=auth_event_ids,
            historical=True,
            depth=inherited_depth,
        )

        batch_id_to_connect_to = base_insertion_event["content"][
            EventContentFields.MSC2716_NEXT_BATCH_ID]

    # Create and persist all of the historical events as well as insertion
    # and batch meta events to make the batch navigable in the DAG.
    event_ids, next_batch_id = await self.room_batch_handler.handle_batch_of_events(
        events_to_create=events_to_create,
        room_id=room_id,
        batch_id_to_connect_to=batch_id_to_connect_to,
        initial_prev_event_ids=prev_event_ids,
        inherited_depth=inherited_depth,
        auth_event_ids=auth_event_ids,
        app_service_requester=requester,
    )

    # handle_batch_of_events returns [insertion, *historical, batch] in order.
    insertion_event_id = event_ids[0]
    batch_event_id = event_ids[-1]
    historical_event_ids = event_ids[1:-1]

    response_dict = {
        "state_event_ids": state_event_ids_at_start,
        "event_ids": historical_event_ids,
        "next_batch_id": next_batch_id,
        "insertion_event_id": insertion_event_id,
        "batch_event_id": batch_event_id,
    }
    if base_insertion_event is not None:
        response_dict[
            "base_insertion_event_id"] = base_insertion_event.event_id

    return HTTPStatus.OK, response_dict
async def on_POST(self, request: SynapseRequest, room_id: str) -> Tuple[int, JsonDict]:
    """Insert a batch of historical events into a room (older, chunk-based
    MSC2716 batch-send implementation).

    Only callable by application services. Expects a JSON body containing
    "state_events_at_start" and "events", a required "prev_event" query
    parameter and an optional "chunk_id" query parameter.

    Returns:
        200 and a dict with the persisted state event IDs, the historical
        event IDs, and the next chunk ID for continuing the backfill.
    """
    requester = await self.auth.get_user_by_req(request, allow_guest=False)

    if not requester.app_service:
        raise AuthError(
            403,
            "Only application services can use the /batchsend endpoint",
        )

    body = parse_json_object_from_request(request)
    assert_params_in_dict(body, ["state_events_at_start", "events"])

    prev_events_from_query = parse_strings_from_args(
        request.args, "prev_event")
    chunk_id_from_query = parse_string(request, "chunk_id")

    if prev_events_from_query is None:
        raise SynapseError(
            400,
            "prev_event query parameter is required when inserting historical messages back in time",
            errcode=Codes.MISSING_PARAM,
        )

    # For the event we are inserting next to (`prev_events_from_query`),
    # find the most recent auth events (derived from state events) that
    # allowed that message to be sent. We will use that as a base
    # to auth our historical messages against.
    (
        most_recent_prev_event_id,
        _,
    ) = await self.store.get_max_depth_of(prev_events_from_query)
    # mapping from (type, state_key) -> state_event_id
    prev_state_map = await self.state_store.get_state_ids_for_event(
        most_recent_prev_event_id)
    # List of state event ID's
    prev_state_ids = list(prev_state_map.values())
    auth_event_ids = prev_state_ids

    # Persist the floating state events that auth the upcoming history
    # (typically invite/member events). Each one becomes an auth event for
    # the ones that follow.
    state_events_at_start = []
    for state_event in body["state_events_at_start"]:
        assert_params_in_dict(
            state_event, ["type", "origin_server_ts", "content", "sender"])

        logger.debug(
            "RoomBatchSendEventRestServlet inserting state_event=%s, auth_event_ids=%s",
            state_event,
            auth_event_ids,
        )

        event_dict = {
            "type": state_event["type"],
            "origin_server_ts": state_event["origin_server_ts"],
            "content": state_event["content"],
            "room_id": room_id,
            "sender": state_event["sender"],
            "state_key": state_event["state_key"],
        }

        # Mark all events as historical
        event_dict["content"][EventContentFields.MSC2716_HISTORICAL] = True

        # Make the state events float off on their own
        fake_prev_event_id = "$" + random_string(43)

        # TODO: This is pretty much the same as some other code to handle
        # inserting state in this file
        if event_dict["type"] == EventTypes.Member:
            membership = event_dict["content"].get("membership", None)
            event_id, _ = await self.room_member_handler.update_membership(
                await self._create_requester_for_user_id_from_app_service(
                    state_event["sender"], requester.app_service),
                target=UserID.from_string(event_dict["state_key"]),
                room_id=room_id,
                action=membership,
                content=event_dict["content"],
                outlier=True,
                prev_event_ids=[fake_prev_event_id],
                # Make sure to use a copy of this list because we modify it
                # later in the loop here. Otherwise it will be the same
                # reference and also update in the event when we append later.
                auth_event_ids=auth_event_ids.copy(),
            )
        else:
            # TODO: Add some complement tests that adds state that is not member joins
            # and will use this code path. Maybe we only want to support join state events
            # and can get rid of this `else`?
            (
                event,
                _,
            ) = await self.event_creation_handler.create_and_send_nonmember_event(
                await self._create_requester_for_user_id_from_app_service(
                    state_event["sender"], requester.app_service),
                event_dict,
                outlier=True,
                prev_event_ids=[fake_prev_event_id],
                # Make sure to use a copy of this list because we modify it
                # later in the loop here. Otherwise it will be the same
                # reference and also update in the event when we append later.
                auth_event_ids=auth_event_ids.copy(),
            )
            event_id = event.event_id

        state_events_at_start.append(event_id)
        auth_event_ids.append(event_id)

    events_to_create = body["events"]

    inherited_depth = await self._inherit_depth_from_prev_ids(
        prev_events_from_query)

    # Figure out which chunk to connect to. If they passed in
    # chunk_id_from_query let's use it. The chunk ID passed in comes
    # from the chunk_id in the "insertion" event from the previous chunk.
    last_event_in_chunk = events_to_create[-1]
    chunk_id_to_connect_to = chunk_id_from_query
    base_insertion_event = None
    if chunk_id_from_query:
        # All but the first base insertion event should point at a fake
        # event, which causes the HS to ask for the state at the start of
        # the chunk later.
        # NOTE(review): `fake_prev_event_id` is only bound inside the
        # state-events loop above; if "state_events_at_start" is empty this
        # raises NameError — confirm callers always supply state events.
        prev_event_ids = [fake_prev_event_id]
        # TODO: Verify the chunk_id_from_query corresponds to an insertion event
        pass
    # Otherwise, create an insertion event to act as a starting point.
    #
    # We don't always have an insertion event to start hanging more history
    # off of (ideally there would be one in the main DAG, but that's not the
    # case if we're wanting to add history to e.g. existing rooms without
    # an insertion event), in which case we just create a new insertion event
    # that can then get pointed to by a "marker" event later.
    else:
        prev_event_ids = prev_events_from_query

        base_insertion_event_dict = self._create_insertion_event_dict(
            sender=requester.user.to_string(),
            room_id=room_id,
            origin_server_ts=last_event_in_chunk["origin_server_ts"],
        )
        base_insertion_event_dict["prev_events"] = prev_event_ids.copy()

        (
            base_insertion_event,
            _,
        ) = await self.event_creation_handler.create_and_send_nonmember_event(
            await self._create_requester_for_user_id_from_app_service(
                base_insertion_event_dict["sender"],
                requester.app_service,
            ),
            base_insertion_event_dict,
            prev_event_ids=base_insertion_event_dict.get("prev_events"),
            auth_event_ids=auth_event_ids,
            historical=True,
            depth=inherited_depth,
        )

        chunk_id_to_connect_to = base_insertion_event["content"][
            EventContentFields.MSC2716_NEXT_CHUNK_ID]

    # Connect this current chunk to the insertion event from the previous chunk
    chunk_event = {
        "type": EventTypes.MSC2716_CHUNK,
        "sender": requester.user.to_string(),
        "room_id": room_id,
        "content": {
            EventContentFields.MSC2716_CHUNK_ID: chunk_id_to_connect_to,
            EventContentFields.MSC2716_HISTORICAL: True,
        },
        # Since the chunk event is put at the end of the chunk,
        # where the newest-in-time event is, copy the origin_server_ts from
        # the last event we're inserting
        "origin_server_ts": last_event_in_chunk["origin_server_ts"],
    }
    # Add the chunk event to the end of the chunk (newest-in-time)
    events_to_create.append(chunk_event)

    # Add an "insertion" event to the start of each chunk (next to the oldest-in-time
    # event in the chunk) so the next chunk can be connected to this one.
    insertion_event = self._create_insertion_event_dict(
        sender=requester.user.to_string(),
        room_id=room_id,
        # Since the insertion event is put at the start of the chunk,
        # where the oldest-in-time event is, copy the origin_server_ts from
        # the first event we're inserting
        origin_server_ts=events_to_create[0]["origin_server_ts"],
    )
    # Prepend the insertion event to the start of the chunk (oldest-in-time)
    events_to_create = [insertion_event] + events_to_create

    # Create (but do not yet persist) every event in the chunk, chaining
    # prev_events so they form a line in the DAG.
    event_ids = []
    events_to_persist = []
    for ev in events_to_create:
        assert_params_in_dict(
            ev, ["type", "origin_server_ts", "content", "sender"])

        event_dict = {
            "type": ev["type"],
            "origin_server_ts": ev["origin_server_ts"],
            "content": ev["content"],
            "room_id": room_id,
            "sender": ev["sender"],  # requester.user.to_string(),
            "prev_events": prev_event_ids.copy(),
        }

        # Mark all events as historical
        event_dict["content"][EventContentFields.MSC2716_HISTORICAL] = True

        event, context = await self.event_creation_handler.create_event(
            await self._create_requester_for_user_id_from_app_service(
                ev["sender"], requester.app_service),
            event_dict,
            prev_event_ids=event_dict.get("prev_events"),
            auth_event_ids=auth_event_ids,
            historical=True,
            depth=inherited_depth,
        )
        logger.debug(
            "RoomBatchSendEventRestServlet inserting event=%s, prev_event_ids=%s, auth_event_ids=%s",
            event,
            prev_event_ids,
            auth_event_ids,
        )

        assert self.hs.is_mine_id(
            event.sender), "User must be our own: %s" % (event.sender, )

        events_to_persist.append((event, context))
        event_id = event.event_id

        event_ids.append(event_id)
        prev_event_ids = [event_id]

    # Persist events in reverse-chronological order so they have the
    # correct stream_ordering as they are backfilled (which decrements).
    # Events are sorted by (topological_ordering, stream_ordering)
    # where topological_ordering is just depth.
    for (event, context) in reversed(events_to_persist):
        ev = await self.event_creation_handler.handle_new_client_event(
            await self._create_requester_for_user_id_from_app_service(
                event["sender"], requester.app_service),
            event=event,
            context=context,
        )

    # Add the base_insertion_event to the bottom of the list we return
    if base_insertion_event is not None:
        event_ids.append(base_insertion_event.event_id)

    return 200, {
        "state_events": state_events_at_start,
        "events": event_ids,
        "next_chunk_id": insertion_event["content"][
            EventContentFields.MSC2716_NEXT_CHUNK_ID],
    }