def create_content(self, media_type, upload_name, content, content_length, auth_user):
    """Store uploaded content for a local user and return the mxc URL.

    Writes the raw bytes to the local media store, records the upload in
    the database, generates thumbnails, and yields the "mxc://..." URI.

    Args:
        media_type: content type of the upload (e.g. "image/png").
        upload_name: client-supplied file name, if any.
        content: the raw bytes to store.
        content_length: length of ``content`` in bytes.
        auth_user: user_id of the uploader.
    """
    # Random, unguessable id that becomes the media part of the mxc URI.
    media_id = random_string(24)

    fname = self.filepaths.local_media_filepath(media_id)
    self._makedirs(fname)

    # This shouldn't block for very long because the content will have
    # already been uploaded at this point.
    with open(fname, "wb") as f:
        f.write(content)

    yield self.store.store_local_media(
        media_id=media_id,
        media_type=media_type,
        time_now_ms=self.clock.time_msec(),
        upload_name=upload_name,
        media_length=content_length,
        user_id=auth_user,
    )
    media_info = {
        "media_type": media_type,
        "media_length": content_length,
    }

    yield self._generate_local_thumbnails(media_id, media_info)

    defer.returnValue("mxc://%s/%s" % (self.server_name, media_id))
def __init__(self, clock):
    """Initialise bookkeeping for one replication connection.

    Args:
        clock: clock used for all connection timestamps (ping/command times).
    """
    self.clock = clock

    # Timestamps (ms) used to detect dead/idle connections.
    self.last_received_command = self.clock.time_msec()
    self.last_sent_command = 0
    self.time_we_closed = None  # When we requested the connection be closed

    self.received_ping = False  # Have we received a ping from the other side

    self.state = ConnectionStates.CONNECTING

    self.name = "anon"  # The name sent by a client.
    self.conn_id = random_string(5)  # To dedupe in case of name clashes.

    # List of pending commands to send once we've established the connection
    self.pending_commands = []

    # The LoopingCall for sending pings.
    self._send_ping_loop = None

    # Per-command counters for metrics on traffic in each direction.
    self.inbound_commands_counter = CounterMetric(
        "inbound_commands", labels=["command"],
    )
    self.outbound_commands_counter = CounterMetric(
        "outbound_commands", labels=["command"],
    )
def __init__(self, hs):
    """Worker-side presence tracker that reports syncing users to the master.

    Args:
        hs: the HomeServer object providing clients, stores and clocks.
    """
    self.is_mine_id = hs.is_mine_id
    self.http_client = hs.get_simple_http_client()
    self.store = hs.get_datastore()
    # user_id -> number of in-flight /sync requests on this worker.
    self.user_to_num_current_syncs = {}
    # Master endpoint we POST our syncing-user set to.
    self.syncing_users_url = hs.config.worker_replication_url + "/syncing_users"
    self.clock = hs.get_clock()
    self.notifier = hs.get_notifier()

    active_presence = self.store.take_presence_startup_info()
    self.user_to_current_state = {
        state.user_id: state
        for state in active_presence
    }

    # Random id identifying this process to the master across restarts.
    self.process_id = random_string(16)
    logger.info("Presence process_id is %r", self.process_id)

    # Flags serialising the background sync-notification sends.
    self._sending_sync = False
    self._need_to_send_sync = False
    self.clock.looping_call(
        self._send_syncing_users_regularly,
        UPDATE_SYNCING_USERS_MS,
    )

    # Tell the master we are going away on clean shutdown.
    reactor.addSystemEventTrigger("before", "shutdown", self._on_shutdown)
def __init__(self, hs):
    """Worker-side presence handler that batches "stopped syncing" updates.

    Args:
        hs: the HomeServer object providing clients, stores and clocks.
    """
    self.hs = hs
    self.is_mine_id = hs.is_mine_id
    self.http_client = hs.get_simple_http_client()
    self.store = hs.get_datastore()
    # user_id -> number of in-flight /sync requests on this worker.
    self.user_to_num_current_syncs = {}
    self.clock = hs.get_clock()
    self.notifier = hs.get_notifier()

    active_presence = self.store.take_presence_startup_info()
    self.user_to_current_state = {
        state.user_id: state
        for state in active_presence
    }

    # user_id -> last_sync_ms. Lists the users that have stopped syncing
    # but we haven't notified the master of that yet
    self.users_going_offline = {}

    # Periodically flush users_going_offline to the master (every 10s).
    self._send_stop_syncing_loop = self.clock.looping_call(
        self.send_stop_syncing, 10 * 1000
    )

    # Random id identifying this process to the master.
    self.process_id = random_string(16)
    logger.info("Presence process_id is %r", self.process_id)
def test_exchange_refresh_token_none(self):
    """Exchanging a token that was never stored must raise StoreError."""
    user_id = stringutils.random_string(32)
    token_gen = TokenGenerator()
    unstored_token = token_gen.generate(user_id)

    # Nothing was ever inserted into refresh_tokens, so the exchange
    # cannot find a matching row and must fail.
    with self.assertRaises(StoreError):
        yield self.store.exchange_refresh_token(
            unstored_token, token_gen.generate
        )
def start_purge_history(self, room_id, token, delete_local_events=False):
    """Start off a history purge on a room.

    Args:
        room_id (str): The room to purge from

        token (str): topological token to delete events before
        delete_local_events (bool): True to delete local events as well
            as remote ones

    Returns:
        str: unique ID for this purge transaction.

    Raises:
        SynapseError: (400) if a purge is already running for this room.
    """
    # Only one purge may run per room at a time.
    if room_id in self._purges_in_progress_by_room:
        raise SynapseError(
            400,
            "History purge already in progress for %s" % (room_id, ),
        )

    purge_id = random_string(16)

    # we log the purge_id here so that it can be tied back to the
    # request id in the log lines.
    logger.info("[purge] starting purge_id %s", purge_id)

    # Status object that callers can poll while the purge runs in the
    # background.
    self._purges_by_id[purge_id] = PurgeStatus()
    run_in_background(
        self._purge_history,
        purge_id, room_id, token, delete_local_events,
    )
    return purge_id
def send_request(**kwargs):
    """Serialize the payload and issue the replication HTTP request to the
    master, retrying indefinitely on gateway timeouts when the endpoint
    allows it. Connection failures surface as a 502 SynapseError.
    """
    data = yield cls._serialize_payload(**kwargs)

    url_args = [
        urllib.parse.quote(kwargs[name], safe="") for name in cls.PATH_ARGS
    ]

    if cls.CACHE:
        # A fresh transaction id lets the master dedupe retried requests.
        txn_id = random_string(10)
        url_args.append(txn_id)

    if cls.METHOD == "POST":
        request_func = client.post_json_get_json
    elif cls.METHOD == "PUT":
        request_func = client.put_json
    elif cls.METHOD == "GET":
        request_func = client.get_json
    else:
        # We have already asserted in the constructor that a
        # compatible was picked, but lets be paranoid.
        raise Exception(
            "Unknown METHOD on %s replication endpoint" % (cls.NAME,)
        )

    uri = "http://%s:%s/_synapse/replication/%s/%s" % (
        host, port, cls.NAME, "/".join(url_args),
    )

    try:
        # We keep retrying the same request for timeouts. This is so that we
        # have a good idea that the request has either succeeded or failed on
        # the master, and so whether we should clean up or not.
        while True:
            try:
                result = yield request_func(uri, data)
                break
            except CodeMessageException as e:
                if e.code != 504 or not cls.RETRY_ON_TIMEOUT:
                    raise

            logger.warn("%s request timed out", cls.NAME)

            # If we timed out we probably don't need to worry about backing
            # off too much, but lets just wait a little anyway.
            yield clock.sleep(1)
    except HttpResponseException as e:
        # We convert to SynapseError as we know that it was a SynapseError
        # on the master process that we should send to the client. (And
        # importantly, not stack traces everywhere)
        raise e.to_synapse_error()
    except RequestSendFailed as e:
        # The request never reached the master; report a bad gateway with
        # the underlying cause chained for the logs.
        raise_from(SynapseError(502, "Failed to talk to master"), e)

    defer.returnValue(result)
def create_event_id(self):
    """Mint a new event ID: seconds-timestamp + per-process counter +
    five random characters, localised to this server's hostname."""
    counter_part = str(self.event_id_count)
    self.event_id_count += 1

    # Timestamp first so ids sort roughly by creation time; the counter
    # and random suffix disambiguate events created in the same second.
    local_part = "%s%s%s" % (
        str(int(self.clock.time())),
        counter_part,
        random_string(5),
    )

    return EventID.create(local_part, self.hostname).to_string()
async def on_POST(self, request):
    """Handle a request for a phone-number registration token.

    Validates the msisdn, checks it is allowed and not already registered,
    then proxies the token request to the configured msisdn delegate.
    """
    body = parse_json_object_from_request(request)

    assert_params_in_dict(
        body, ["client_secret", "country", "phone_number", "send_attempt"])

    client_secret = body["client_secret"]
    country = body["country"]
    phone_number = body["phone_number"]
    send_attempt = body["send_attempt"]
    next_link = body.get("next_link")  # Optional param

    msisdn = phone_number_to_msisdn(country, phone_number)

    if not check_3pid_allowed(self.hs, "msisdn", msisdn):
        raise SynapseError(
            403,
            "Phone numbers are not authorized to register on this server",
            Codes.THREEPID_DENIED,
        )

    existing_user_id = await self.hs.get_datastore(
    ).get_user_id_by_threepid("msisdn", msisdn)

    if existing_user_id is not None:
        if self.hs.config.request_token_inhibit_3pid_errors:
            # Make the client think the operation succeeded. See the rationale in the
            # comments for request_token_inhibit_3pid_errors.
            # Also wait for some random amount of time between 100ms and 1s to make it
            # look like we did something.
            await self.hs.get_clock().sleep(random.randint(1, 10) / 10)
            return 200, {"sid": random_string(16)}

        raise SynapseError(400, "Phone number is already in use",
                           Codes.THREEPID_IN_USE)

    if not self.hs.config.account_threepid_delegate_msisdn:
        logger.warning(
            "No upstream msisdn account_threepid_delegate configured on the server to "
            "handle this request")
        raise SynapseError(
            400,
            "Registration by phone number is not supported on this homeserver"
        )

    # Delegate the actual SMS send to the configured identity service.
    ret = await self.identity_handler.requestMsisdnToken(
        self.hs.config.account_threepid_delegate_msisdn,
        country,
        phone_number,
        client_secret,
        send_attempt,
        next_link,
    )

    threepid_send_requests.labels(type="msisdn",
                                  reason="register").observe(send_attempt)

    return 200, ret
async def create_ui_auth_session( self, clientdict: JsonDict, uri: str, method: str, description: str, ) -> UIAuthSessionData: """ Creates a new user interactive authentication session. The session can be used to track the stages necessary to authenticate a user across multiple HTTP requests. Args: clientdict: The dictionary from the client root level, not the 'auth' key. uri: The URI this session was initiated with, this is checked at each stage of the authentication to ensure that the asked for operation has not changed. method: The method this session was initiated with, this is checked at each stage of the authentication to ensure that the asked for operation has not changed. description: A string description of the operation that the current authentication is authorising. Returns: The newly created session. Raises: StoreError if a unique session ID cannot be generated. """ # The clientdict gets stored as JSON. clientdict_json = json.dumps(clientdict) # autogen a session ID and try to create it. We may clash, so just # try a few times till one goes through, giving up eventually. attempts = 0 while attempts < 5: session_id = stringutils.random_string(24) try: await self.db_pool.simple_insert( table="ui_auth_sessions", values={ "session_id": session_id, "clientdict": clientdict_json, "uri": uri, "method": method, "description": description, "serverdict": "{}", "creation_time": self.hs.get_clock().time_msec(), }, desc="create_ui_auth_session", ) return UIAuthSessionData( session_id, clientdict, uri, method, description ) except self.db_pool.engine.module.IntegrityError: attempts += 1 raise StoreError(500, "Couldn't generate a session ID.")
def map_request_to_name(self, request):
    """Map an upload request to a unique, non-clashing file path on disk.

    The file name is built as ``<b64(user_id)><random>[.<b64(content-type)>
    [.<ext>]]`` so the name itself carries a content-type hint for clients.

    NOTE(review): uses str/bytes interchangeably around b64encode, which
    looks Python-2 era — confirm before porting.
    """
    # auth the user
    auth_user = yield self.auth.get_user_by_req(request)

    # namespace all file uploads on the user
    prefix = base64.urlsafe_b64encode(
        auth_user.to_string()
    ).replace('=', '')

    # use a random string for the main portion
    main_part = random_string(24)

    # suffix with a file extension if we can make one. This is nice to
    # provide a hint to clients on the file information. We will also reuse
    # this info to spit back the content type to the client.
    suffix = ""
    if request.requestHeaders.hasHeader("Content-Type"):
        content_type = request.requestHeaders.getRawHeaders(
            "Content-Type")[0]
        suffix = "." + base64.urlsafe_b64encode(content_type)
        if (content_type.split("/")[0].lower() in
                ["image", "video", "audio"]):
            file_ext = content_type.split("/")[-1]
            # be a little paranoid and only allow a-z
            file_ext = re.sub("[^a-z]", "", file_ext)
            suffix += "." + file_ext

    file_name = prefix + main_part + suffix
    file_path = os.path.join(self.directory, file_name)
    logger.info("User %s is uploading a file to path %s",
                auth_user.to_string(), file_path)

    # keep trying to make a non-clashing file, with a sensible max attempts
    attempts = 0
    while os.path.exists(file_path):
        main_part = random_string(24)
        file_name = prefix + main_part + suffix
        file_path = os.path.join(self.directory, file_name)
        attempts += 1
        if attempts > 25:  # really? Really?
            raise SynapseError(500, "Unable to create file.")

    defer.returnValue(file_path)
async def check_device_registered(
    self,
    user_id: str,
    device_id: Optional[str],
    initial_device_display_name: Optional[str] = None,
    auth_provider_id: Optional[str] = None,
    auth_provider_session_id: Optional[str] = None,
) -> str:
    """
    If the given device has not been registered, register it with the
    supplied display name.

    If no device_id is supplied, we make one up.

    Args:
        user_id:  @user:id
        device_id: device id supplied by client
        initial_device_display_name: device display name from client
        auth_provider_id: The SSO IdP the user used, if any.
        auth_provider_session_id: The session ID (sid) got from the SSO IdP.
    Returns:
        device id (generated if none was supplied)
    Raises:
        errors.StoreError: if we exhausted our attempts at generating a
            non-clashing device id.
    """
    self._check_device_name_length(initial_device_display_name)

    if device_id is not None:
        new_device = await self.store.store_device(
            user_id=user_id,
            device_id=device_id,
            initial_device_display_name=initial_device_display_name,
            auth_provider_id=auth_provider_id,
            auth_provider_session_id=auth_provider_session_id,
        )
        if new_device:
            # Only notify when the device is genuinely new for this user.
            await self.notify_device_update(user_id, [device_id])
        return device_id

    # if the device id is not specified, we'll autogen one, but loop a few
    # times in case of a clash.
    attempts = 0
    while attempts < 5:
        new_device_id = stringutils.random_string(10).upper()
        new_device = await self.store.store_device(
            user_id=user_id,
            device_id=new_device_id,
            initial_device_display_name=initial_device_display_name,
            auth_provider_id=auth_provider_id,
            auth_provider_session_id=auth_provider_session_id,
        )
        if new_device:
            await self.notify_device_update(user_id, [new_device_id])
            return new_device_id
        attempts += 1

    raise errors.StoreError(500, "Couldn't generate a device ID.")
def test_room_creation_too_long(self) -> None:
    """A room alias that is too long once fully qualified is rejected."""
    endpoint = "/_matrix/client/r0/createRoom"

    # The localpart alone stays below the limit; only when the server
    # name is appended does the alias exceed it, so a 400 here proves
    # the length check runs against the whole alias.
    alias_localpart = random_string(256 - len(self.hs.hostname))
    payload = json.dumps({"room_alias_name": alias_localpart})

    channel = self.make_request(
        "POST", endpoint, payload, access_token=self.user_tok
    )

    self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result)
def generate_files(self, config):
    """Ensure the signing key file exists, creating it if missing and
    rewriting single-token keys from the old on-disk format."""
    signing_key_path = config["signing_key_path"]
    if not os.path.exists(signing_key_path):
        # No key yet: generate a fresh one under a random "a_xxxx" key id.
        with open(signing_key_path, "w") as signing_key_file:
            key_id = "a_" + random_string(4)
            write_signing_keys(
                signing_key_file, (generate_signing_key(key_id),),
            )
    else:
        signing_keys = self.read_file(signing_key_path, "signing_key")
        if len(signing_keys.split("\n")[0].split()) == 1:
            # handle keys in the old format.
            key_id = "a_" + random_string(4)
            key = decode_signing_key_base64(
                NACL_ED25519, key_id, signing_keys.split("\n")[0]
            )
            with open(signing_key_path, "w") as signing_key_file:
                write_signing_keys(
                    signing_key_file, (key,),
                )
async def send_device_message(self, sender_user_id, message_type, messages):
    """Fan out to-device messages: store copies for local devices and
    queue federation EDUs for remote destinations, with tracing tags."""
    set_tag("number_of_messages", len(messages))
    set_tag("sender", sender_user_id)
    local_messages = {}
    remote_messages = {}
    for user_id, by_device in messages.items():
        # we use UserID.from_string to catch invalid user ids
        if self.is_mine(UserID.from_string(user_id)):
            messages_by_device = {
                device_id: {
                    "content": message_content,
                    "type": message_type,
                    "sender": sender_user_id,
                }
                for device_id, message_content in by_device.items()
            }
            if messages_by_device:
                local_messages[user_id] = messages_by_device
        else:
            destination = get_domain_from_id(user_id)
            remote_messages.setdefault(destination, {})[user_id] = by_device

    # One message id is shared by every remote copy of this send.
    message_id = random_string(16)

    context = get_active_span_text_map()

    remote_edu_contents = {}
    for destination, messages in remote_messages.items():
        with start_active_span("to_device_for_user"):
            set_tag("destination", destination)
            remote_edu_contents[destination] = {
                "messages": messages,
                "sender": sender_user_id,
                "type": message_type,
                "message_id": message_id,
                # Carry the tracing context over federation.
                "org.matrix.opentracing_context": json_encoder.encode(context),
            }

    log_kv({"local_messages": local_messages})
    stream_id = await self.store.add_messages_to_device_inbox(
        local_messages, remote_edu_contents)

    self.notifier.on_new_event("to_device_key", stream_id,
                               users=local_messages.keys())

    log_kv({"remote_messages": remote_messages})
    for destination in remote_messages.keys():
        # Enqueue a new federation transaction to send the new
        # device messages to each remote destination.
        self.federation.send_device_messages(destination)
def _get_session_info(self, session_id):
    """Return the session dict for session_id, minting a new session
    when the id is unknown or falsy."""
    if session_id and session_id in self.sessions:
        return self.sessions[session_id]

    # Unknown (or missing) id: pick a fresh random id that does not
    # clash with any existing session.
    new_id = stringutils.random_string(24)
    while new_id in self.sessions:
        new_id = stringutils.random_string(24)

    session = {"id": new_id}
    self.sessions[new_id] = session
    return session
def test_room_creation_too_long(self):
    """An over-long fully-qualified room alias must be rejected with 400."""
    endpoint = "/_matrix/client/r0/createRoom"

    # The localpart alone fits under the limit; the full alias
    # (localpart plus server name) does not, so a 400 proves the check
    # is applied to the whole alias.
    payload = json.dumps(
        {"room_alias_name": random_string(256 - len(self.hs.hostname))}
    )

    request, channel = self.make_request(
        "POST", endpoint, payload, access_token=self.user_tok
    )
    self.render(request)

    self.assertEqual(channel.code, 400, channel.result)
def _get_session_info(self, request, session_id):
    """Look up (or lazily create) the registration session for session_id.

    A new session starts with both auth stages marked incomplete.
    """
    if session_id:
        return self.sessions[session_id]

    # No session yet: mint a random id that does not collide with an
    # existing session.
    new_id = stringutils.random_string(24)
    while new_id in self.sessions:
        new_id = stringutils.random_string(24)

    session = {
        "id": new_id,
        LoginType.EMAIL_IDENTITY: False,
        LoginType.RECAPTCHA: False,
    }
    self.sessions[new_id] = session
    return session
def test_exchange_refresh_token_invalid(self):
    """Presenting a token that differs from the stored one raises StoreError."""
    user_id = stringutils.random_string(32)
    token_gen = TokenGenerator()
    presented_token = token_gen.generate(user_id)

    # Seed the table with a token that does NOT match the one we present.
    stored_token = "%s-wrong" % (presented_token,)
    self.db_pool.runQuery(
        "INSERT INTO refresh_tokens(user_id, token) VALUES(?,?)",
        (user_id, stored_token,))

    with self.assertRaises(StoreError):
        yield self.store.exchange_refresh_token(
            presented_token, token_gen.generate
        )
def test_room_creation(self) -> None:
    """Creating a room with a short, valid alias succeeds with 200."""
    endpoint = "/_matrix/client/r0/createRoom"

    # Check with an alias of allowed length. There should already be
    # a test that ensures it works in test_register.py, but let's be
    # as cautious as possible here.
    payload = json.dumps({"room_alias_name": random_string(5)})

    channel = self.make_request(
        "POST", endpoint, payload, access_token=self.user_tok
    )

    self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
def send_request(**kwargs):
    """Serialize the payload and send the replication HTTP request to the
    master process, retrying indefinitely on gateway timeouts when the
    endpoint allows it."""
    data = yield cls._serialize_payload(**kwargs)

    url_args = [
        urllib.parse.quote(kwargs[name], safe='') for name in cls.PATH_ARGS
    ]

    if cls.CACHE:
        # A transaction id lets the master dedupe retried requests.
        txn_id = random_string(10)
        url_args.append(txn_id)

    if cls.METHOD == "POST":
        request_func = client.post_json_get_json
    elif cls.METHOD == "PUT":
        request_func = client.put_json
    elif cls.METHOD == "GET":
        request_func = client.get_json
    else:
        # We have already asserted in the constructor that a
        # compatible was picked, but lets be paranoid.
        raise Exception(
            "Unknown METHOD on %s replication endpoint" % (cls.NAME,)
        )

    uri = "http://%s:%s/_synapse/replication/%s/%s" % (
        host, port, cls.NAME, "/".join(url_args)
    )

    try:
        # We keep retrying the same request for timeouts. This is so that we
        # have a good idea that the request has either succeeded or failed on
        # the master, and so whether we should clean up or not.
        while True:
            try:
                result = yield request_func(uri, data)
                break
            except CodeMessageException as e:
                if e.code != 504 or not cls.RETRY_ON_TIMEOUT:
                    raise

            logger.warn("%s request timed out", cls.NAME)

            # If we timed out we probably don't need to worry about backing
            # off too much, but lets just wait a little anyway.
            yield clock.sleep(1)
    except HttpResponseException as e:
        # We convert to SynapseError as we know that it was a SynapseError
        # on the master process that we should send to the client. (And
        # importantly, not stack traces everywhere)
        raise e.to_synapse_error()

    defer.returnValue(result)
async def _redirect_to_username_picker(
    self,
    auth_provider_id: str,
    remote_user_id: str,
    attributes: UserAttributes,
    client_redirect_url: str,
    extra_login_attributes: Optional[JsonDict],
) -> NoReturn:
    """Creates a UsernameMappingSession and redirects the browser

    Called if the user mapping provider doesn't return a localpart for a new
    user. Raises a RedirectException which redirects the browser to the
    username picker.

    Args:
        auth_provider_id: A unique identifier for this SSO provider, e.g.
            "oidc" or "saml".

        remote_user_id: The unique identifier from the SSO provider.

        attributes: the user attributes returned by the user mapping provider.

        client_redirect_url: The redirect URL passed in by the client, which we
            will eventually redirect back to.

        extra_login_attributes: An optional dictionary of extra
            attributes to be provided to the client in the login response.

    Raises:
        RedirectException
    """
    # Record everything we need to finish registration once the user has
    # picked a name; the session expires if they take too long.
    session_id = random_string(16)
    now = self._clock.time_msec()
    session = UsernameMappingSession(
        auth_provider_id=auth_provider_id,
        remote_user_id=remote_user_id,
        display_name=attributes.display_name,
        emails=attributes.emails,
        client_redirect_url=client_redirect_url,
        expiry_time_ms=now + self._MAPPING_SESSION_VALIDITY_PERIOD_MS,
        extra_login_attributes=extra_login_attributes,
    )

    self._username_mapping_sessions[session_id] = session
    logger.info("Recorded registration session id %s", session_id)

    # Set the cookie and redirect to the username picker
    e = RedirectException(
        b"/_synapse/client/pick_username/account_details")
    e.cookies.append(
        b"%s=%s; path=/" %
        (USERNAME_MAPPING_SESSION_COOKIE_NAME, session_id.encode("ascii")))
    raise e
async def on_POST(self, request):
    """Handle a request for a token to add a phone number to an account.

    Validates the msisdn, checks it is allowed and not already bound to a
    user, then proxies the request to the configured msisdn delegate.
    """
    body = parse_json_object_from_request(request)
    assert_params_in_dict(
        body, ["client_secret", "country", "phone_number", "send_attempt"])

    client_secret = body["client_secret"]
    assert_valid_client_secret(client_secret)

    country = body["country"]
    phone_number = body["phone_number"]
    send_attempt = body["send_attempt"]
    next_link = body.get("next_link")  # Optional param

    msisdn = phone_number_to_msisdn(country, phone_number)

    if not check_3pid_allowed(self.hs, "msisdn", msisdn):
        raise SynapseError(
            403,
            "Account phone numbers are not authorized on this server",
            Codes.THREEPID_DENIED,
        )

    existing_user_id = await self.store.get_user_id_by_threepid(
        "msisdn", msisdn)

    if existing_user_id is not None:
        if self.hs.config.request_token_inhibit_3pid_errors:
            # Make the client think the operation succeeded. See the rationale in the
            # comments for request_token_inhibit_3pid_errors.
            return 200, {"sid": random_string(16)}

        raise SynapseError(400, "MSISDN is already in use",
                           Codes.THREEPID_IN_USE)

    if not self.hs.config.account_threepid_delegate_msisdn:
        logger.warning(
            "No upstream msisdn account_threepid_delegate configured on the server to "
            "handle this request")
        raise SynapseError(
            400,
            "Adding phone numbers to user account is not supported by this homeserver",
        )

    # Delegate the actual SMS send to the configured identity service.
    ret = await self.identity_handler.requestMsisdnToken(
        self.hs.config.account_threepid_delegate_msisdn,
        country,
        phone_number,
        client_secret,
        send_attempt,
        next_link,
    )

    return 200, ret
def _get_session_info(self, session_id):
    """Fetch the session dict for session_id, creating a fresh session
    when the id is unknown or falsy."""
    if session_id and session_id in self.sessions:
        return self.sessions[session_id]

    # Generate a new, non-clashing session id.
    fresh_id = stringutils.random_string(24)
    while fresh_id in self.sessions:
        fresh_id = stringutils.random_string(24)

    entry = {
        "id": fresh_id,
    }
    self.sessions[fresh_id] = entry
    return entry
def test_room_creation(self):
    """A createRoom request with a short, valid alias succeeds with 200."""
    endpoint = "/_matrix/client/r0/createRoom"

    # Check with an alias of allowed length. There should already be
    # a test that ensures it works in test_register.py, but let's be
    # as cautious as possible here.
    payload = json.dumps({"room_alias_name": random_string(5)})

    request, channel = self.make_request(
        "POST", endpoint, payload, access_token=self.user_tok
    )
    self.render(request)

    self.assertEqual(channel.code, 200, channel.result)
def test_exchange_refresh_token_valid(self):
    """A stored token exchanges cleanly: the row is replaced and the new
    token is returned alongside the owning user and device."""
    user_id = stringutils.random_string(32)
    device_id = stringutils.random_string(16)
    token_gen = TokenGenerator()
    first_token = token_gen.generate(user_id)

    # Seed the table with the token we are about to exchange.
    self.db_pool.runQuery(
        "INSERT INTO refresh_tokens(user_id, token, device_id) "
        "VALUES(?,?,?)", (user_id, first_token, device_id))

    (found_user_id, refresh_token, device_id) = \
        yield self.store.exchange_refresh_token(first_token,
                                                token_gen.generate)
    self.assertEqual(user_id, found_user_id)

    # The old token must have been replaced by the new one for this user.
    rows = yield self.db_pool.runQuery(
        "SELECT token, device_id FROM refresh_tokens WHERE user_id = ?",
        (user_id, ))
    self.assertEqual([(refresh_token, device_id)], rows)

    # We issued token 1, then exchanged it for token 2
    expected_refresh_token = u"%s-%d" % (user_id, 2,)
    self.assertEqual(expected_refresh_token, refresh_token)
def test_exchange_refresh_token_invalid(self):
    """Exchanging a token that does not match the stored row must raise
    StoreError."""
    user_id = stringutils.random_string(32)
    token_gen = TokenGenerator()
    issued = token_gen.generate(user_id)

    # The database holds a token that does NOT match the one presented.
    mismatched = "%s-wrong" % (issued, )
    self.db_pool.runQuery(
        "INSERT INTO refresh_tokens(user_id, token) VALUES(?,?)",
        (user_id, mismatched, ),
    )

    with self.assertRaises(StoreError):
        yield self.store.exchange_refresh_token(issued, token_gen.generate)
def generate_files(self, config, config_dir_path):
    """Create the signing key file if missing, upgrading keys stored in
    the old single-token on-disk format along the way.

    Args:
        config: parsed config dict; may carry an inline "signing_key" or a
            "signing_key_path".
        config_dir_path: directory used for the default key path.
    """
    if "signing_key" in config:
        # Key material is inline in the config; nothing to write to disk.
        return

    signing_key_path = config.get("signing_key_path")
    if signing_key_path is None:
        # Default to <config_dir>/<server_name>.signing.key
        signing_key_path = os.path.join(
            config_dir_path, config["server_name"] + ".signing.key")

    if not self.path_exists(signing_key_path):
        print("Generating signing key file %s" % (signing_key_path, ))
        with open(signing_key_path, "w") as signing_key_file:
            key_id = "a_" + random_string(4)
            write_signing_keys(signing_key_file,
                               (generate_signing_key(key_id), ))
    else:
        signing_keys = self.read_file(signing_key_path, "signing_key")
        if len(signing_keys.split("\n")[0].split()) == 1:
            # handle keys in the old format.
            key_id = "a_" + random_string(4)
            key = decode_signing_key_base64(NACL_ED25519, key_id,
                                            signing_keys.split("\n")[0])
            with open(signing_key_path, "w") as signing_key_file:
                write_signing_keys(signing_key_file, (key, ))
def main() -> None:
    """Generate one fresh signing key and write it to the chosen output."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "-o",
        "--output_file",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="Where to write the output to",
    )
    options = arg_parser.parse_args()

    # Key ids are "a_" followed by four random characters.
    key_id = "a_" + random_string(4)
    write_signing_keys(options.output_file, (generate_signing_key(key_id),))
async def on_PUT(
    self,
    request: SynapseRequest,
    room_id: str,
    event_type: str,
    state_key: str,
    txn_id: Optional[str] = None,
) -> Tuple[int, JsonDict]:
    """Send a state event (or a membership event) into a room.

    Shadow-banned senders are given a plausible fake event id so the
    request appears to succeed.
    """
    requester = await self.auth.get_user_by_req(request)

    if txn_id:
        set_tag("txn_id", txn_id)

    content = parse_json_object_from_request(request)

    event_dict = {
        "type": event_type,
        "content": content,
        "room_id": room_id,
        "sender": requester.user.to_string(),
    }

    if state_key is not None:
        event_dict["state_key"] = state_key

    try:
        if event_type == EventTypes.Member:
            # Membership changes go through the room-member handler so
            # the usual join/leave machinery applies.
            membership = content.get("membership", None)
            event_id, _ = await self.room_member_handler.update_membership(
                requester,
                target=UserID.from_string(state_key),
                room_id=room_id,
                action=membership,
                content=content,
            )
        else:
            (
                event,
                _,
            ) = await self.event_creation_handler.create_and_send_nonmember_event(
                requester, event_dict, txn_id=txn_id)
            event_id = event.event_id
    except ShadowBanError:
        # Pretend the event was sent.
        event_id = "$" + random_string(43)

    set_tag("event_id", event_id)
    ret = {"event_id": event_id}
    return 200, ret
async def check_device_registered(self,
                                  user_id,
                                  device_id,
                                  initial_device_display_name=None):
    """
    If the given device has not been registered, register it with the
    supplied display name.

    If no device_id is supplied, we make one up.

    Args:
        user_id (str):  @user:id
        device_id (str | None): device id supplied by client
        initial_device_display_name (str | None): device display name from
             client
    Returns:
        str: device id (generated if none was supplied)
    Raises:
        errors.StoreError: if we exhausted our attempts at generating a
            non-clashing device id.
    """
    self._check_device_name_length(initial_device_display_name)

    if device_id is not None:
        new_device = await self.store.store_device(
            user_id=user_id,
            device_id=device_id,
            initial_device_display_name=initial_device_display_name,
        )
        if new_device:
            # Only notify when the device is genuinely new for this user.
            await self.notify_device_update(user_id, [device_id])
        return device_id

    # if the device id is not specified, we'll autogen one, but loop a few
    # times in case of a clash.
    attempts = 0
    while attempts < 5:
        device_id = stringutils.random_string(10).upper()
        new_device = await self.store.store_device(
            user_id=user_id,
            device_id=device_id,
            initial_device_display_name=initial_device_display_name,
        )
        if new_device:
            await self.notify_device_update(user_id, [device_id])
            return device_id
        attempts += 1

    raise errors.StoreError(500, "Couldn't generate a device ID.")
def _download_remote_file(self, server_name, media_id):
    """Download media from a remote homeserver into the local cache.

    Streams the remote file to disk, records it in the media cache table
    and generates thumbnails; the partially-written file is removed if
    anything fails.

    Args:
        server_name: the remote server the media originated from.
        media_id: the remote server's id for the media.

    Returns (via Deferred):
        dict describing the cached media (type, length, filesystem id, ...).
    """
    file_id = random_string(24)

    fname = self.filepaths.remote_media_filepath(
        server_name, file_id
    )
    self._makedirs(fname)

    try:
        with open(fname, "wb") as f:
            request_path = "/".join((
                "/_matrix/media/v1/download", server_name, media_id,
            ))
            length, headers = yield self.client.get_file(
                server_name, request_path, output_stream=f,
                max_size=self.max_upload_size,
            )
        media_type = headers["Content-Type"][0]
        time_now_ms = self.clock.time_msec()

        yield self.store.store_cached_remote_media(
            origin=server_name,
            media_id=media_id,
            media_type=media_type,
            time_now_ms=self.clock.time_msec(),
            upload_name=None,
            media_length=length,
            filesystem_id=file_id,
        )
    except Exception:
        # Clean up the partial download, then re-raise. This used to be a
        # bare "except:", which would also have intercepted SystemExit,
        # KeyboardInterrupt and GeneratorExit.
        os.remove(fname)
        raise

    media_info = {
        "media_type": media_type,
        "media_length": length,
        "upload_name": None,
        "created_ts": time_now_ms,
        "filesystem_id": file_id,
    }

    yield self._generate_remote_thumbnails(
        server_name, media_id, media_info
    )

    defer.returnValue(media_info)
def _get_session_info(self, session_id: Optional[str]) -> dict:
    """
    Gets or creates a session given a session ID.

    The session can be used to track data across multiple requests, e.g.
    for interactive authentication.
    """
    if session_id and session_id in self.sessions:
        return self.sessions[session_id]

    # No usable session id: create a new session under a fresh random id
    # that does not clash with an existing one.
    new_id = stringutils.random_string(24)
    while new_id in self.sessions:
        new_id = stringutils.random_string(24)

    session = {"id": new_id}
    self.sessions[new_id] = session
    return session
def _download_remote_file(self, server_name, media_id):
    """Download media from a remote homeserver into the local cache.

    Streams the remote file to disk, records it in the media cache table
    and generates thumbnails; the partially-written file is removed if
    anything fails.

    Args:
        server_name: the remote server the media originated from.
        media_id: the remote server's id for the media.

    Returns (via Deferred):
        dict describing the cached media (type, length, filesystem id, ...).
    """
    file_id = random_string(24)

    fname = self.filepaths.remote_media_filepath(
        server_name, file_id
    )
    self._makedirs(fname)

    try:
        with open(fname, "wb") as f:
            request_path = "/".join((
                "/_matrix/media/v1/download", server_name, media_id,
            ))
            length, headers = yield self.client.get_file(
                server_name, request_path, output_stream=f,
                max_size=self.max_upload_size,
            )
        media_type = headers["Content-Type"][0]
        time_now_ms = self.clock.time_msec()

        yield self.store.store_cached_remote_media(
            origin=server_name,
            media_id=media_id,
            media_type=media_type,
            time_now_ms=self.clock.time_msec(),
            upload_name=None,
            media_length=length,
            filesystem_id=file_id,
        )
    except Exception:
        # Clean up the partial download, then re-raise. This used to be a
        # bare "except:", which would also have intercepted SystemExit,
        # KeyboardInterrupt and GeneratorExit.
        os.remove(fname)
        raise

    media_info = {
        "media_type": media_type,
        "media_length": length,
        "upload_name": None,
        "created_ts": time_now_ms,
        "filesystem_id": file_id,
    }

    yield self._generate_remote_thumbnails(
        server_name, media_id, media_info
    )

    defer.returnValue(media_info)
async def on_PUT_or_POST(
    self,
    request: SynapseRequest,
    room_id: str,
    parent_id: str,
    relation_type: str,
    event_type: str,
    txn_id: Optional[str] = None,
) -> Tuple[int, JsonDict]:
    """Send an event that relates to an existing event (annotation, edit,
    reference, ...).

    Shadow-banned senders are given a plausible fake event id so the
    request appears to succeed.
    """
    requester = await self.auth.get_user_by_req(request, allow_guest=True)

    if event_type == EventTypes.Member:
        # Add relations to a membership is meaningless, so we just deny it
        # at the CS API rather than trying to handle it correctly.
        raise SynapseError(400, "Cannot send member events with relations")

    content = parse_json_object_from_request(request)

    aggregation_key = parse_string(request, "key", encoding="utf-8")

    # Fold the relation described by the URL path into the event content.
    content["m.relates_to"] = {
        "event_id": parent_id,
        "rel_type": relation_type,
    }
    if aggregation_key is not None:
        content["m.relates_to"]["key"] = aggregation_key

    event_dict = {
        "type": event_type,
        "content": content,
        "room_id": room_id,
        "sender": requester.user.to_string(),
    }

    try:
        (
            event,
            _,
        ) = await self.event_creation_handler.create_and_send_nonmember_event(
            requester, event_dict=event_dict, txn_id=txn_id
        )
        event_id = event.event_id
    except ShadowBanError:
        # Pretend the event was sent.
        event_id = "$" + random_string(43)

    return 200, {"event_id": event_id}
async def create_session(self, session_type: str, value: JsonDict,
                         expiry_ms: int) -> str:
    """
    Creates a new pagination session for the room hierarchy endpoint.

    Args:
        session_type: The type for this session.
        value: The value to store.
        expiry_ms: How long before an item is evicted from the cache
            in milliseconds. Default is 0, indicating items never get
            evicted based on time.

    Returns:
        The newly created session ID.

    Raises:
        StoreError if a unique session ID cannot be generated.
    """
    # Autogenerate a session ID and try to insert it; on the (rare)
    # clash, retry with a fresh ID, giving up after a few attempts.
    for _ in range(5):
        session_id = stringutils.random_string(24)

        try:
            await self.db_pool.simple_insert(
                table="sessions",
                values={
                    "session_id": session_id,
                    "session_type": session_type,
                    "value": json_encoder.encode(value),
                    "expiry_time_ms": self.hs.get_clock().time_msec()
                    + expiry_ms,
                },
                desc="create_session",
            )
        except self.db_pool.engine.module.IntegrityError:
            continue

        return session_id

    raise StoreError(500, "Couldn't generate a session ID.")
def send_device_message(self, sender_user_id, message_type, messages):
    """Deliver to-device messages: store copies for local devices and
    queue federation EDUs for remote destinations."""
    local_messages = {}
    remote_messages = {}
    for user_id, by_device in messages.items():
        # we use UserID.from_string to catch invalid user ids
        if self.is_mine(UserID.from_string(user_id)):
            messages_by_device = {
                device_id: {
                    "content": message_content,
                    "type": message_type,
                    "sender": sender_user_id,
                }
                for device_id, message_content in by_device.items()
            }
            if messages_by_device:
                local_messages[user_id] = messages_by_device
        else:
            destination = get_domain_from_id(user_id)
            remote_messages.setdefault(destination, {})[user_id] = by_device

    # One message id is shared by every remote copy of this send.
    message_id = random_string(16)

    remote_edu_contents = {}
    for destination, messages in remote_messages.items():
        remote_edu_contents[destination] = {
            "messages": messages,
            "sender": sender_user_id,
            "type": message_type,
            "message_id": message_id,
        }

    stream_id = yield self.store.add_messages_to_device_inbox(
        local_messages, remote_edu_contents
    )

    self.notifier.on_new_event(
        "to_device_key", stream_id, users=local_messages.keys()
    )

    for destination in remote_messages.keys():
        # Enqueue a new federation transaction to send the new
        # device messages to each remote destination.
        self.federation.send_device_messages(destination)
async def create_content(
    self,
    media_type: str,
    upload_name: Optional[str],
    content: IO,
    content_length: int,
    auth_user: str,
) -> str:
    """Store uploaded content for a local user and return the mxc URL

    Args:
        media_type: The content type of the file.
        upload_name: The name of the file, if provided.
        content: A file like object that is the content to store
        content_length: The length of the content
        auth_user: The user_id of the uploader

    Returns:
        The mxc url of the stored content
    """
    media_id = random_string(24)

    # Persist the bytes first; the stored path is what the upload
    # filters get to inspect.
    file_info = FileInfo(server_name=None, file_id=media_id)
    fname = await self.media_storage.store_file(content, file_info)
    logger.info("Stored local media in file %r", fname)

    # Give every configured message filter a chance to veto the upload.
    for media_filter in self.message_filters:
        await media_filter.check_upload(fname, auth_user)

    await self.store.store_local_media(
        media_id=media_id,
        media_type=media_type,
        time_now_ms=self.clock.time_msec(),
        upload_name=upload_name,
        media_length=content_length,
        user_id=auth_user,
    )

    await self._generate_thumbnails(None, media_id, media_id, media_type)

    return "mxc://%s/%s" % (self.server_name, media_id)
async def _get_remote_media_impl(
    self, server_name: str, media_id: str
) -> Tuple[Optional[Responder], dict]:
    """Looks for media in local cache, if not there then attempt to download
    from remote server.

    Args:
        server_name: Remote server_name where the media originated.
        media_id: The media ID of the content (as defined by the remote
            server).

    Returns:
        A tuple of responder and the media info of the file.
    """
    media_info = await self.store.get_cached_remote_media(server_name, media_id)

    # file_id is the ID we use to track the file locally: reuse the one we
    # already recorded for this media if any, otherwise mint a fresh one.
    file_id = media_info["filesystem_id"] if media_info else random_string(24)
    file_info = FileInfo(server_name, file_id)

    # A DB entry means we may already hold the bytes locally - try that first.
    if media_info:
        if media_info["quarantined_by"]:
            logger.info("Media is quarantined")
            raise NotFoundError()

        responder = await self.media_storage.fetch_media(file_info)
        if responder:
            return responder, media_info

    # Nothing cached anywhere - fetch it from the origin server.
    media_info = await self._download_remote_file(server_name, media_id, file_id)

    responder = await self.media_storage.fetch_media(file_info)
    return responder, media_info
def _get_remote_media_impl(self, server_name, media_id):
    """Looks for media in local cache, if not there then attempt to download
    from remote server.

    Args:
        server_name (str): Remote server_name where the media originated.
        media_id (str): The media ID of the content (as defined by the
            remote server).

    Returns:
        Deferred[(Responder, media_info)]
    """
    media_info = yield self.store.get_cached_remote_media(
        server_name, media_id
    )

    # file_id is the ID we use to track the file locally. If we've already
    # seen the file then reuse the existing ID, otherwise generate a new
    # one.
    if media_info:
        file_id = media_info["filesystem_id"]
    else:
        file_id = random_string(24)

    file_info = FileInfo(server_name, file_id)

    # If we have an entry in the DB, try and look for it in the local
    # media storage before hitting the network.
    if media_info:
        if media_info["quarantined_by"]:
            logger.info("Media is quarantined")
            raise NotFoundError()

        responder = yield self.media_storage.fetch_media(file_info)
        if responder:
            defer.returnValue((responder, media_info))

    # Failed to find the file anywhere, lets download it.
    media_info = yield self._download_remote_file(
        server_name, media_id, file_id
    )

    responder = yield self.media_storage.fetch_media(file_info)
    defer.returnValue((responder, media_info))
def _create_duplicate_event(self, txn_id: str) -> Tuple[EventBase, EventContext]:
    """Create a new event with the given transaction ID. All events produced
    by this method will be considered duplicates.
    """
    # Randomise the body: otherwise we'd produce *exactly* the same event
    # with the same hash, and so the same event ID.
    event_dict = {
        "type": EventTypes.Message,
        "room_id": self.room_id,
        "sender": self.requester.user.to_string(),
        "content": {"msgtype": "m.text", "body": random_string(5)},
    }
    return self.get_success(
        self.handler.create_event(self.requester, event_dict, txn_id=txn_id)
    )
def test_loading_missing_templates(self):
    # Use a temporary directory that exists on the system, but that isn't likely to
    # contain template files
    with tempfile.TemporaryDirectory() as tmp_dir:
        # Attempt to load an HTML template from our custom template directory
        template = self.hs.config.read_templates(["sso_error.html"], tmp_dir)[0]

        # If no errors, we should've gotten the default template instead

        # Render the template with a marker string we can search for
        marker = random_string(5)
        rendered = template.render({"error_description": marker})

        # Check that our string exists in the template
        self.assertIn(
            marker,
            rendered,
            "Template file did not contain our test string",
        )
async def _generate_poll_id(self, creator_id, poll_alias, room_id):
    """Generate a random poll ID, persist the poll, and return its details.

    Args:
        creator_id (str): user ID of the poll creator.
        poll_alias (str): human-readable alias for the poll.
        room_id (str): ID of the room the poll belongs to.

    Returns:
        dict|None: the stored poll's details, or None if storing failed.
    """
    try:
        # Avoid shadowing the imported `random_string` helper.
        poll_localpart = stringutils.random_string(18)
        gen_poll_id = PollID(poll_localpart, room_id).to_string()
        if isinstance(gen_poll_id, bytes):
            gen_poll_id = gen_poll_id.decode("utf-8")
        await self.store.store_poll(
            poll_id=gen_poll_id,
            poll_creator_user_id=creator_id,
            poll_alias=poll_alias,
            room_id=room_id,
        )
        # Renamed from `dict`, which shadowed the builtin.
        return {
            'poll_id': gen_poll_id,
            'poll_creator_user_id': creator_id,
            'poll_alias': poll_alias,
            'room_id': room_id,
        }
    except StoreError as e:
        # The previous call passed extra positional args with no format
        # placeholders ("STORE ERROR", "", e), which makes the logging
        # module itself raise a formatting error. Use lazy %-formatting.
        # NOTE(review): on failure this still returns None (implicitly),
        # preserving the original behaviour for callers.
        logger.error("Store error while generating poll ID: %s", e)
def _generate_room_id(self, creator_id, is_public):
    # autogen room IDs and try to create it. We may clash, so just
    # try a few times till one goes through, giving up eventually.
    for _ in range(5):
        localpart = stringutils.random_string(18)
        gen_room_id = RoomID(localpart, self.hs.hostname).to_string()
        if isinstance(gen_room_id, bytes):
            gen_room_id = gen_room_id.decode("utf-8")
        try:
            yield self.store.store_room(
                room_id=gen_room_id,
                room_creator_user_id=creator_id,
                is_public=is_public,
            )
            return gen_room_id
        except StoreError:
            # Clash with an existing room ID; try again.
            pass
    raise StoreError(500, "Couldn't generate a room ID.")
def check_device_registered(self, user_id, device_id,
                            initial_device_display_name=None):
    """
    If the given device has not been registered, register it with the
    supplied display name.

    If no device_id is supplied, we make one up.

    Args:
        user_id (str): @user:id
        device_id (str | None): device id supplied by client
        initial_device_display_name (str | None): device display name from
            client
    Returns:
        str: device id (generated if none was supplied)
    """
    if device_id is not None:
        # The client chose its own device ID: record it, tolerating the
        # case where we've seen it before.
        yield self.store.store_device(
            user_id=user_id,
            device_id=device_id,
            initial_device_display_name=initial_device_display_name,
            ignore_if_known=True,
        )
        defer.returnValue(device_id)

    # if the device id is not specified, we'll autogen one, but loop a few
    # times in case of a clash.
    for _ in range(5):
        device_id = stringutils.random_string(10).upper()
        try:
            yield self.store.store_device(
                user_id=user_id,
                device_id=device_id,
                initial_device_display_name=initial_device_display_name,
                ignore_if_known=False,
            )
            defer.returnValue(device_id)
        except errors.StoreError:
            pass
    raise errors.StoreError(500, "Couldn't generate a device ID.")
def on_POST(self, request, user_id):
    """Mint a short-lived OpenID access token for the requesting user."""
    requester = yield self.auth.get_user_by_req(request)
    if requester.user.to_string() != user_id:
        raise AuthError(403, "Cannot request tokens for other users.")

    # Parse the request body to make sure it's JSON, but ignore the contents
    # for now.
    parse_json_object_from_request(request)

    token = random_string(24)
    ts_valid_until_ms = self.clock.time_msec() + self.EXPIRES_MS

    yield self.store.insert_open_id_token(token, ts_valid_until_ms, user_id)

    defer.returnValue((
        200,
        {
            "access_token": token,
            "token_type": "Bearer",
            "matrix_server_name": self.server_name,
            # Clients expect this in seconds, not ms.
            "expires_in": self.EXPIRES_MS / 1000,
        },
    ))
def _get_renewal_token(self, user_id):
    """Generates a 32-byte long random string that will be inserted into the
    user's renewal email's unique link, then saves it into the database.

    Args:
        user_id (str): ID of the user to generate a string for.

    Returns:
        defer.Deferred[str]: The generated string.

    Raises:
        StoreError(500): Couldn't generate a unique string after 5 attempts.
    """
    for _ in range(5):
        renewal_token = stringutils.random_string(32)
        try:
            yield self.store.set_renewal_token_for_user(user_id, renewal_token)
            defer.returnValue(renewal_token)
        except StoreError:
            # Token collided with an existing one; mint another.
            pass
    raise StoreError(500, "Couldn't generate a unique string as refresh string.")
def _generate_room_id(self, creator_id, is_public):
    # autogen room IDs and try to create it. We may clash, so just
    # try a few times till one goes through, giving up eventually.
    for _ in range(5):
        localpart = stringutils.random_string(18)
        gen_room_id = RoomID(
            localpart,
            self.hs.hostname,
        ).to_string()
        if isinstance(gen_room_id, bytes):
            gen_room_id = gen_room_id.decode('utf-8')
        try:
            yield self.store.store_room(
                room_id=gen_room_id,
                room_creator_user_id=creator_id,
                is_public=is_public,
            )
            defer.returnValue(gen_room_id)
        except StoreError:
            # ID clash; loop round and try a fresh one.
            pass
    raise StoreError(500, "Couldn't generate a room ID.")
def create_content(self, media_type, upload_name, content, content_length,
                   auth_user):
    """Store uploaded content for a local user and return the mxc URL

    Args:
        media_type(str): The content type of the file
        upload_name(str): The name of the file
        content: A file like object that is the content to store
        content_length(int): The length of the content
        auth_user(str): The user_id of the uploader

    Returns:
        Deferred[str]: The mxc url of the stored content
    """
    media_id = random_string(24)

    # Write the bytes into the media store under the fresh media ID.
    fname = yield self.media_storage.store_file(
        content,
        FileInfo(
            server_name=None,
            file_id=media_id,
        ),
    )
    logger.info("Stored local media in file %r", fname)

    yield self.store.store_local_media(
        media_id=media_id,
        media_type=media_type,
        time_now_ms=self.clock.time_msec(),
        upload_name=upload_name,
        media_length=content_length,
        user_id=auth_user,
    )

    yield self._generate_thumbnails(None, media_id, media_id, media_type)

    defer.returnValue("mxc://%s/%s" % (self.server_name, media_id))
def create_event(self, etype=None, **kwargs):
    """Build an event of the given type, filling in any missing fields
    (event_id, ts, age_ts) and dispatching to the registered event class."""
    kwargs["type"] = etype
    if "event_id" not in kwargs:
        kwargs["event_id"] = "%s@%s" % (random_string(10), self.hs.hostname)

    if "ts" not in kwargs:
        kwargs["ts"] = int(self.clock.time_msec())

    # The "age" key is a delta timestamp that should be converted into an
    # absolute timestamp the minute we see it.
    if "age" in kwargs:
        kwargs["age_ts"] = int(self.clock.time_msec()) - int(kwargs.pop("age"))
    elif "age_ts" not in kwargs:
        kwargs["age_ts"] = int(self.clock.time_msec())

    # Unknown types fall back to the generic event class.
    handler = self._event_list.get(etype, GenericEvent)
    return handler(**kwargs)
def create_room(self, user_id, room_id, config):
    """ Creates a new room.

    Args:
        user_id (str): The ID of the user creating the new room.
        room_id (str): The proposed ID for the new room. Can be None, in
            which case one will be created for you.
        config (dict): A dict of configuration options.
    Returns:
        The new room ID.
    Raises:
        SynapseError if the room ID was taken, couldn't be stored, or
        something went horribly wrong.
    """
    self.ratelimit(user_id)

    # Reserve the requested alias (if any) before doing anything else, so
    # we can bail early when it is already taken.
    if "room_alias_name" in config:
        room_alias = RoomAlias.create_local(
            config["room_alias_name"], self.hs
        )
        mapping = yield self.store.get_association_from_room_alias(
            room_alias
        )

        if mapping:
            raise SynapseError(400, "Room alias already taken")
    else:
        room_alias = None

    # Validate the invitees' user IDs up front.
    invite_list = config.get("invite", [])
    for i in invite_list:
        try:
            self.hs.parse_userid(i)
        except:
            raise SynapseError(400, "Invalid user_id: %s" % (i,))

    is_public = config.get("visibility", None) == "public"

    if room_id:
        # Ensure room_id is the correct type
        room_id_obj = RoomID.from_string(room_id, self.hs)
        if not room_id_obj.is_mine:
            raise SynapseError(400, "Room id must be local")

        yield self.store.store_room(
            room_id=room_id,
            room_creator_user_id=user_id,
            is_public=is_public
        )
    else:
        # autogen room IDs and try to create it. We may clash, so just
        # try a few times till one goes through, giving up eventually.
        attempts = 0
        room_id = None
        while attempts < 5:
            try:
                random_string = stringutils.random_string(18)
                gen_room_id = RoomID.create_local(random_string, self.hs)
                yield self.store.store_room(
                    room_id=gen_room_id.to_string(),
                    room_creator_user_id=user_id,
                    is_public=is_public
                )
                room_id = gen_room_id.to_string()
                break
            except StoreError:
                attempts += 1
        if not room_id:
            raise StoreError(500, "Couldn't generate a room ID.")

    if room_alias:
        directory_handler = self.hs.get_handlers().directory_handler
        yield directory_handler.create_association(
            user_id=user_id,
            room_id=room_id,
            room_alias=room_alias,
            servers=[self.hs.hostname],
        )

    user = self.hs.parse_userid(user_id)
    creation_events = self._create_events_for_new_room(
        user, room_id, is_public=is_public
    )

    room_member_handler = self.hs.get_handlers().room_member_handler

    # Helper to send a single event: membership events go through the
    # room member handler, everything else through the generic path.
    @defer.inlineCallbacks
    def handle_event(event):
        snapshot = yield self.store.snapshot_room(event)

        logger.debug("Event: %s", event)

        if event.type == RoomMemberEvent.TYPE:
            yield room_member_handler.change_membership(
                event,
                do_auth=True
            )
        else:
            yield self._on_new_room_event(
                event, snapshot, extra_users=[user], suppress_auth=True
            )

    # Send the initial state events for the new room.
    for event in creation_events:
        yield handle_event(event)

    if "name" in config:
        name = config["name"]
        name_event = self.event_factory.create_event(
            etype=RoomNameEvent.TYPE,
            room_id=room_id,
            user_id=user_id,
            content={"name": name},
        )
        yield handle_event(name_event)

    if "topic" in config:
        topic = config["topic"]
        topic_event = self.event_factory.create_event(
            etype=RoomTopicEvent.TYPE,
            room_id=room_id,
            user_id=user_id,
            content={"topic": topic},
        )
        yield handle_event(topic_event)

    # Invite everyone the creator asked for.
    content = {"membership": Membership.INVITE}
    for invitee in invite_list:
        invite_event = self.event_factory.create_event(
            etype=RoomMemberEvent.TYPE,
            state_key=invitee,
            room_id=room_id,
            user_id=user_id,
            content=content
        )
        yield handle_event(invite_event)

    result = {"room_id": room_id}
    if room_alias:
        result["room_alias"] = room_alias.to_string()
        yield directory_handler.send_room_alias_update_event(
            user_id, room_id
        )

    defer.returnValue(result)
def _make_group_id(clock):
    """Build a transaction-group ID: current time in ms plus a short
    random suffix to avoid clashes within the same millisecond."""
    prefix = str(int(clock.time_msec()))
    return prefix + random_string(5)
def _download_url(self, url, user):
    """Download the given URL into the local media store and return a dict
    of metadata about the download (type, length, filename, etc.).

    Raises:
        SynapseError(500): if the download fails for any reason.
    """
    # TODO: we should probably honour robots.txt... except in practice
    # we're most likely being explicitly triggered by a human rather than a
    # bot, so are we really a robot?

    # XXX: horrible duplication with base_resource's _download_remote_file()
    file_id = random_string(24)

    fname = self.filepaths.local_media_filepath(file_id)
    self.media_repo._makedirs(fname)

    try:
        with open(fname, "wb") as f:
            logger.debug("Trying to get url '%s'" % url)
            length, headers, uri, code = yield self.client.get_file(
                url, output_stream=f, max_size=self.max_spider_size,
            )
            # FIXME: pass through 404s and other error messages nicely

        media_type = headers["Content-Type"][0]
        time_now_ms = self.clock.time_msec()

        # Try to extract a download name from the Content-Disposition
        # header, preferring the RFC 5987 UTF-8 form over the plain one.
        content_disposition = headers.get("Content-Disposition", None)
        if content_disposition:
            _, params = cgi.parse_header(content_disposition[0],)
            download_name = None

            # First check if there is a valid UTF-8 filename
            download_name_utf8 = params.get("filename*", None)
            if download_name_utf8:
                if download_name_utf8.lower().startswith("utf-8''"):
                    download_name = download_name_utf8[7:]

            # If there isn't check for an ascii name.
            if not download_name:
                download_name_ascii = params.get("filename", None)
                if download_name_ascii and is_ascii(download_name_ascii):
                    download_name = download_name_ascii

            if download_name:
                download_name = urlparse.unquote(download_name)
                try:
                    download_name = download_name.decode("utf-8")
                except UnicodeDecodeError:
                    # Undecodable name: drop it rather than store garbage.
                    download_name = None
        else:
            download_name = None

        yield self.store.store_local_media(
            media_id=file_id,
            media_type=media_type,
            time_now_ms=self.clock.time_msec(),
            upload_name=download_name,
            media_length=length,
            user_id=user,
        )

    except Exception as e:
        # Don't leave a partial file lying around on failure.
        os.remove(fname)
        raise SynapseError(
            500, ("Failed to download content: %s" % e), Codes.UNKNOWN
        )

    defer.returnValue({
        "media_type": media_type,
        "media_length": length,
        "download_name": download_name,
        "created_ts": time_now_ms,
        "filesystem_id": file_id,
        "filename": fname,
        "uri": uri,
        "response_code": code,
        # FIXME: we should calculate a proper expiration based on the
        # Cache-Control and Expire headers. But for now, assume 1 hour.
        "expires": 60 * 60 * 1000,
        "etag": headers["ETag"][0] if "ETag" in headers else None,
    })
def _download_remote_file(self, server_name, media_id):
    """Fetch remote media over federation, cache it locally, generate
    thumbnails, and return the resulting media_info dict."""
    file_id = random_string(24)

    fname = self.filepaths.remote_media_filepath(
        server_name, file_id
    )
    self._makedirs(fname)

    try:
        with open(fname, "wb") as f:
            request_path = "/".join((
                "/_matrix/media/v1/download", server_name, media_id,
            ))
            try:
                length, headers = yield self.client.get_file(
                    server_name, request_path, output_stream=f,
                    max_size=self.max_upload_size,
                )
            except Exception as e:
                logger.warn("Failed to fetch remoted media %r", e)
                raise SynapseError(502, "Failed to fetch remoted media")

        media_type = headers["Content-Type"][0]
        time_now_ms = self.clock.time_msec()

        # Try to extract an upload name from the Content-Disposition
        # header, preferring the RFC 5987 UTF-8 form over the plain one.
        content_disposition = headers.get("Content-Disposition", None)
        if content_disposition:
            _, params = cgi.parse_header(content_disposition[0],)
            upload_name = None

            # First check if there is a valid UTF-8 filename
            upload_name_utf8 = params.get("filename*", None)
            if upload_name_utf8:
                if upload_name_utf8.lower().startswith("utf-8''"):
                    upload_name = upload_name_utf8[7:]

            # If there isn't check for an ascii name.
            if not upload_name:
                upload_name_ascii = params.get("filename", None)
                if upload_name_ascii and is_ascii(upload_name_ascii):
                    upload_name = upload_name_ascii

            if upload_name:
                upload_name = urlparse.unquote(upload_name)
                try:
                    upload_name = upload_name.decode("utf-8")
                except UnicodeDecodeError:
                    # Undecodable name: drop it rather than store garbage.
                    upload_name = None
        else:
            upload_name = None

        yield self.store.store_cached_remote_media(
            origin=server_name,
            media_id=media_id,
            media_type=media_type,
            time_now_ms=self.clock.time_msec(),
            upload_name=upload_name,
            media_length=length,
            filesystem_id=file_id,
        )
    except:
        # Deliberate bare except: clean up the partial file, then re-raise
        # whatever went wrong (including the SynapseErrors above).
        os.remove(fname)
        raise

    media_info = {
        "media_type": media_type,
        "media_length": length,
        "upload_name": upload_name,
        "created_ts": time_now_ms,
        "filesystem_id": file_id,
    }

    yield self._generate_remote_thumbnails(
        server_name, media_id, media_info
    )

    defer.returnValue(media_info)
def create_room(self, user_id, room_id, config):
    """ Creates a new room.

    Args:
        user_id (str): The ID of the user creating the new room.
        room_id (str): The proposed ID for the new room. Can be None, in
            which case one will be created for you.
        config (dict): A dict of configuration options.
    Returns:
        The new room ID.
    Raises:
        SynapseError if the room ID was taken, couldn't be stored, or
        something went horribly wrong.
    """
    self.ratelimit(user_id)

    # Validate and reserve the requested alias (if any) first, so we can
    # bail early when it is malformed or already taken.
    if "room_alias_name" in config:
        for wchar in string.whitespace:
            if wchar in config["room_alias_name"]:
                raise SynapseError(400, "Invalid characters in room alias")

        room_alias = RoomAlias.create(
            config["room_alias_name"],
            self.hs.hostname,
        )
        mapping = yield self.store.get_association_from_room_alias(
            room_alias
        )

        if mapping:
            raise SynapseError(400, "Room alias already taken")
    else:
        room_alias = None

    # Validate the invitees' user IDs up front.
    invite_list = config.get("invite", [])
    for i in invite_list:
        try:
            UserID.from_string(i)
        except:
            raise SynapseError(400, "Invalid user_id: %s" % (i,))

    is_public = config.get("visibility", None) == "public"

    if room_id:
        # Ensure room_id is the correct type
        room_id_obj = RoomID.from_string(room_id)
        if not self.hs.is_mine(room_id_obj):
            raise SynapseError(400, "Room id must be local")

        yield self.store.store_room(
            room_id=room_id,
            room_creator_user_id=user_id,
            is_public=is_public
        )
    else:
        # autogen room IDs and try to create it. We may clash, so just
        # try a few times till one goes through, giving up eventually.
        attempts = 0
        room_id = None
        while attempts < 5:
            try:
                random_string = stringutils.random_string(18)
                gen_room_id = RoomID.create(
                    random_string,
                    self.hs.hostname,
                )
                yield self.store.store_room(
                    room_id=gen_room_id.to_string(),
                    room_creator_user_id=user_id,
                    is_public=is_public
                )
                room_id = gen_room_id.to_string()
                break
            except StoreError:
                attempts += 1
        if not room_id:
            raise StoreError(500, "Couldn't generate a room ID.")

    if room_alias:
        directory_handler = self.hs.get_handlers().directory_handler
        yield directory_handler.create_association(
            user_id=user_id,
            room_id=room_id,
            room_alias=room_alias,
            servers=[self.hs.hostname],
        )

    # Work out which preset governs the room's initial state.
    preset_config = config.get(
        "preset",
        RoomCreationPreset.PUBLIC_CHAT
        if is_public
        else RoomCreationPreset.PRIVATE_CHAT
    )

    raw_initial_state = config.get("initial_state", [])

    initial_state = OrderedDict()
    for val in raw_initial_state:
        initial_state[(val["type"], val.get("state_key", ""))] = val["content"]

    creation_content = config.get("creation_content", {})

    user = UserID.from_string(user_id)
    creation_events = self._create_events_for_new_room(
        user, room_id,
        preset_config=preset_config,
        invite_list=invite_list,
        initial_state=initial_state,
        creation_content=creation_content,
        room_alias=room_alias,
    )

    msg_handler = self.hs.get_handlers().message_handler

    # Send the initial state events for the new room.
    for event in creation_events:
        yield msg_handler.create_and_send_event(event, ratelimit=False)

    if "name" in config:
        name = config["name"]
        yield msg_handler.create_and_send_event({
            "type": EventTypes.Name,
            "room_id": room_id,
            "sender": user_id,
            "state_key": "",
            "content": {"name": name},
        }, ratelimit=False)

    if "topic" in config:
        topic = config["topic"]
        yield msg_handler.create_and_send_event({
            "type": EventTypes.Topic,
            "room_id": room_id,
            "sender": user_id,
            "state_key": "",
            "content": {"topic": topic},
        }, ratelimit=False)

    # Invite everyone the creator asked for.
    for invitee in invite_list:
        yield msg_handler.create_and_send_event({
            "type": EventTypes.Member,
            "state_key": invitee,
            "room_id": room_id,
            "sender": user_id,
            "content": {"membership": Membership.INVITE},
        }, ratelimit=False)

    result = {"room_id": room_id}

    if room_alias:
        result["room_alias"] = room_alias.to_string()
        yield directory_handler.send_room_alias_update_event(
            user_id, room_id
        )

    defer.returnValue(result)