def test_sign_minimal(self):
    """Sign a minimal event and check the known-good hash and signature."""
    builder = EventBuilder(
        {
            'event_id': "$0:domain",
            'origin': "domain",
            'origin_server_ts': 1000000,
            'signatures': {},
            'type': "X",
            'unsigned': {'age_ts': 1000000},
        },
    )

    add_hashes_and_signatures(builder, HOSTNAME, self.signing_key)

    event = builder.build()

    self.assertTrue(hasattr(event, 'hashes'))
    self.assertIn('sha256', event.hashes)
    # assertEqual: assertEquals is a deprecated unittest alias
    self.assertEqual(
        event.hashes['sha256'],
        "6tJjLpXtggfke8UxFhAKg82QVkJzvKOVOOSjUDK4ZSI",
    )

    self.assertTrue(hasattr(event, 'signatures'))
    self.assertIn(HOSTNAME, event.signatures)
    # use HOSTNAME consistently instead of the hard-coded "domain"
    self.assertIn(KEY_NAME, event.signatures[HOSTNAME])
    self.assertEqual(
        event.signatures[HOSTNAME][KEY_NAME],
        "2Wptgo4CwmLo/Y8B8qinxApKaCkBG2fjTWB7AbP5Uy+"
        "aIbygsSdLOFzvdDjww8zUVKCmI02eP9xtyJxc/cLiBA",
    )
def test_sign_message(self):
    """Sign a room message event and check the known-good hash and signature."""
    builder = EventBuilder(
        {
            'content': {'body': "Here is the message content"},
            'event_id': "$0:domain",
            'origin': "domain",
            'origin_server_ts': 1000000,
            'type': "m.room.message",
            'room_id': "!r:domain",
            'sender': "@u:domain",
            'signatures': {},
            'unsigned': {'age_ts': 1000000},
        }
    )

    add_hashes_and_signatures(builder, HOSTNAME, self.signing_key)

    event = builder.build()

    self.assertTrue(hasattr(event, 'hashes'))
    self.assertIn('sha256', event.hashes)
    # assertEqual: assertEquals is a deprecated unittest alias
    self.assertEqual(
        event.hashes['sha256'],
        "onLKD1bGljeBWQhWZ1kaP9SorVmRQNdN5aM2JYU2n/g"
    )

    self.assertTrue(hasattr(event, 'signatures'))
    self.assertIn(HOSTNAME, event.signatures)
    # use HOSTNAME consistently instead of the hard-coded "domain"
    self.assertIn(KEY_NAME, event.signatures[HOSTNAME])
    self.assertEqual(
        event.signatures[HOSTNAME][KEY_NAME],
        "Wm+VzmOUOz08Ds+0NTWb1d4CZrVsJSikkeRxh6aCcUw"
        "u6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA",
    )
def test_sign_minimal(self):
    """Minimal event: hashing and signing must produce the known-good values."""
    builder = EventBuilder(
        {
            'event_id': "$0:domain",
            'origin': "domain",
            'origin_server_ts': 1000000,
            'signatures': {},
            'type': "X",
            'unsigned': {'age_ts': 1000000},
        },
    )

    add_hashes_and_signatures(builder, HOSTNAME, self.signing_key)

    event = builder.build()

    self.assertTrue(hasattr(event, 'hashes'))
    self.assertIn('sha256', event.hashes)
    # assertEqual: assertEquals is a deprecated unittest alias
    self.assertEqual(
        event.hashes['sha256'],
        "6tJjLpXtggfke8UxFhAKg82QVkJzvKOVOOSjUDK4ZSI",
    )

    self.assertTrue(hasattr(event, 'signatures'))
    self.assertIn(HOSTNAME, event.signatures)
    # use HOSTNAME consistently instead of the hard-coded "domain"
    self.assertIn(KEY_NAME, event.signatures[HOSTNAME])
    self.assertEqual(
        event.signatures[HOSTNAME][KEY_NAME],
        "2Wptgo4CwmLo/Y8B8qinxApKaCkBG2fjTWB7AbP5Uy+"
        "aIbygsSdLOFzvdDjww8zUVKCmI02eP9xtyJxc/cLiBA",
    )
def test_sign_message(self):
    """Message event: hashing and signing must produce the known-good values."""
    builder = EventBuilder({
        'content': {
            'body': "Here is the message content",
        },
        'event_id': "$0:domain",
        'origin': "domain",
        'origin_server_ts': 1000000,
        'type': "m.room.message",
        'room_id': "!r:domain",
        'sender': "@u:domain",
        'signatures': {},
        'unsigned': {'age_ts': 1000000},
    })

    add_hashes_and_signatures(builder, HOSTNAME, self.signing_key)

    event = builder.build()

    self.assertTrue(hasattr(event, 'hashes'))
    self.assertIn('sha256', event.hashes)
    # assertEqual: assertEquals is a deprecated unittest alias
    self.assertEqual(
        event.hashes['sha256'],
        "onLKD1bGljeBWQhWZ1kaP9SorVmRQNdN5aM2JYU2n/g",
    )

    self.assertTrue(hasattr(event, 'signatures'))
    self.assertIn(HOSTNAME, event.signatures)
    # use HOSTNAME consistently instead of the hard-coded "domain"
    self.assertIn(KEY_NAME, event.signatures[HOSTNAME])
    self.assertEqual(
        event.signatures[HOSTNAME][KEY_NAME],
        "Wm+VzmOUOz08Ds+0NTWb1d4CZrVsJSikkeRxh6aCcUw"
        "u6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA")
def test_sign_minimal(self):
    """Hash+sign a minimal v1 event dict and verify the known-good values."""
    event_dict = {
        "event_id": "$0:domain",
        "origin": "domain",
        "origin_server_ts": 1000000,
        "signatures": {},
        "type": "X",
        "unsigned": {"age_ts": 1000000},
    }

    add_hashes_and_signatures(
        RoomVersions.V1, event_dict, HOSTNAME, self.signing_key
    )

    event = make_event_from_dict(event_dict)

    self.assertTrue(hasattr(event, "hashes"))
    self.assertIn("sha256", event.hashes)
    # assertEqual: assertEquals is a deprecated unittest alias
    self.assertEqual(
        event.hashes["sha256"], "6tJjLpXtggfke8UxFhAKg82QVkJzvKOVOOSjUDK4ZSI"
    )

    self.assertTrue(hasattr(event, "signatures"))
    self.assertIn(HOSTNAME, event.signatures)
    # use HOSTNAME consistently instead of the hard-coded "domain"
    self.assertIn(KEY_NAME, event.signatures[HOSTNAME])
    self.assertEqual(
        event.signatures[HOSTNAME][KEY_NAME],
        "2Wptgo4CwmLo/Y8B8qinxApKaCkBG2fjTWB7AbP5Uy+"
        "aIbygsSdLOFzvdDjww8zUVKCmI02eP9xtyJxc/cLiBA",
    )
def test_sign_message(self):
    """Hash+sign a v1 message event dict and verify the known-good values."""
    event_dict = {
        "content": {"body": "Here is the message content"},
        "event_id": "$0:domain",
        "origin": "domain",
        "origin_server_ts": 1000000,
        "type": "m.room.message",
        "room_id": "!r:domain",
        "sender": "@u:domain",
        "signatures": {},
        "unsigned": {"age_ts": 1000000},
    }

    add_hashes_and_signatures(
        RoomVersions.V1, event_dict, HOSTNAME, self.signing_key
    )

    event = make_event_from_dict(event_dict)

    self.assertTrue(hasattr(event, "hashes"))
    self.assertIn("sha256", event.hashes)
    # assertEqual: assertEquals is a deprecated unittest alias
    self.assertEqual(
        event.hashes["sha256"], "onLKD1bGljeBWQhWZ1kaP9SorVmRQNdN5aM2JYU2n/g"
    )

    self.assertTrue(hasattr(event, "signatures"))
    self.assertIn(HOSTNAME, event.signatures)
    # use HOSTNAME consistently instead of the hard-coded "domain"
    self.assertIn(KEY_NAME, event.signatures[HOSTNAME])
    self.assertEqual(
        event.signatures[HOSTNAME][KEY_NAME],
        "Wm+VzmOUOz08Ds+0NTWb1d4CZrVsJSikkeRxh6aCcUw"
        "u6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA",
    )
def _on_new_room_event(self, event, snapshot, extra_destinations=None,
                       extra_users=None, suppress_auth=False,
                       do_invite_host=None):
    """Annotate, sign, (optionally) auth, persist and federate a new event.

    Args:
        event: the event being sent.
        snapshot: room snapshot used to fill out the event's prev_events.
        extra_destinations (list|None): extra remote hosts to send the PDU to.
        extra_users (list|None): extra users to notify locally.
        suppress_auth (bool): if True, skip the local auth check.
        do_invite_host (str|None): if set, first send an invite to this host
            and adopt the signatures from its response.
    """
    # Fix: the default arguments were mutable lists ([]), which are shared
    # between calls and can accumulate state; use None sentinels instead.
    if extra_destinations is None:
        extra_destinations = []
    if extra_users is None:
        extra_users = []

    yield run_on_reactor()

    snapshot.fill_out_prev_events(event)

    yield self.state_handler.annotate_event_with_state(event)
    yield self.auth.add_auth_events(event)

    logger.debug("Signing event...")
    add_hashes_and_signatures(event, self.server_name, self.signing_key)
    logger.debug("Signed event.")

    if not suppress_auth:
        logger.debug("Authing...")
        self.auth.check(event, auth_events=event.old_state_events)
        logger.debug("Authed")
    else:
        logger.debug("Suppressed auth.")

    if do_invite_host:
        federation_handler = self.hs.get_handlers().federation_handler
        invite_event = yield federation_handler.send_invite(
            do_invite_host, event
        )
        # FIXME: We need to check if the remote changed anything else
        event.signatures = invite_event.signatures

    yield self.store.persist_event(event)

    destinations = set(extra_destinations)
    # Send a PDU to all hosts who have joined the room.
    for k, s in event.state_events.items():
        try:
            if k[0] == RoomMemberEvent.TYPE:
                if s.content["membership"] == Membership.JOIN:
                    destinations.add(
                        self.hs.parse_userid(s.state_key).domain
                    )
        except Exception:
            # Fix: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit. Still deliberately best-effort.
            # logger.warning: logger.warn is a deprecated alias.
            logger.warning(
                "Failed to get destination from event %s", s.event_id
            )

    event.destinations = list(destinations)

    yield self.notifier.on_new_room_event(event, extra_users=extra_users)

    federation_handler = self.hs.get_handlers().federation_handler
    yield federation_handler.handle_new_event(event, snapshot)
def test_send_join(self):
    """happy-path test of send_join"""
    joining_user = "******" + self.OTHER_SERVER_NAME
    join_event_dict = self._make_join(joining_user)["event"]
    add_hashes_and_signatures(
        KNOWN_ROOM_VERSIONS[DEFAULT_ROOM_VERSION],
        join_event_dict,
        signature_name=self.OTHER_SERVER_NAME,
        signing_key=self.OTHER_SERVER_SIGNATURE_KEY,
    )
    channel = self.make_signed_federation_request(
        "PUT",
        f"/_matrix/federation/v2/send_join/{self._room_id}/x",
        content=join_event_dict,
    )
    self.assertEqual(channel.code, 200, channel.json_body)

    # The full room state should be returned.
    state = [
        (ev["type"], ev["state_key"]) for ev in channel.json_body["state"]
    ]
    self.assertCountEqual(
        state,
        [
            ("m.room.create", ""),
            ("m.room.power_levels", ""),
            ("m.room.join_rules", ""),
            ("m.room.history_visibility", ""),
            ("m.room.member", "@kermit:test"),
            ("m.room.member", "@fozzie:test"),
            # nb: *not* the joining user
        ],
    )

    # The auth chain should also be complete.
    auth_chain = [
        (ev["type"], ev["state_key"])
        for ev in channel.json_body["auth_chain"]
    ]
    self.assertCountEqual(
        auth_chain,
        [
            ("m.room.create", ""),
            ("m.room.member", "@kermit:test"),
            ("m.room.power_levels", ""),
            ("m.room.join_rules", ""),
        ],
    )

    # The new user should now be a joined member of the room.
    current_state = self.get_success(
        self._storage_controllers.state.get_current_state(self._room_id)
    )
    self.assertEqual(
        current_state[("m.room.member", joining_user)].membership, "join"
    )
def create_local_event_from_event_dict(clock, hostname, signing_key,
                                       format_version, event_dict,
                                       internal_metadata_dict=None):
    """Build, hash and sign a locally produced event from a full event dict.

    Ensures that `origin`, `origin_server_ts` (and, for v1 events,
    `event_id`) carry correct values for a locally produced event before
    the dict is hashed and signed.

    Args:
        clock (Clock)
        hostname (str)
        signing_key
        format_version (int)
        event_dict (dict)
        internal_metadata_dict (dict|None)

    Returns:
        FrozenEvent
    """
    if format_version not in KNOWN_EVENT_FORMAT_VERSIONS:
        raise Exception(
            "No event format defined for version %r" % (format_version,)
        )

    if internal_metadata_dict is None:
        internal_metadata_dict = {}

    time_now = int(clock.time_msec())

    # v1 events carry a server-generated event_id; later formats derive it.
    if format_version == EventFormatVersions.V1:
        event_dict["event_id"] = _create_event_id(clock, hostname)

    event_dict["origin"] = hostname
    event_dict["origin_server_ts"] = time_now

    event_dict.setdefault("unsigned", {})
    age = event_dict["unsigned"].pop("age", 0)
    event_dict["unsigned"].setdefault("age_ts", time_now - age)

    event_dict.setdefault("signatures", {})

    add_hashes_and_signatures(event_dict, hostname, signing_key)

    event_cls = event_type_from_format_version(format_version)
    return event_cls(event_dict, internal_metadata_dict=internal_metadata_dict)
def _create_new_client_event(self, builder, prev_event_ids=None):
    """Fill out prev_events/depth on ``builder``, auth and sign it, build it.

    Returns a deferred firing with ``(event, context)``.
    """
    if prev_event_ids:
        # Caller chose the prev events explicitly.
        prev_events = yield self.store.add_event_hashes(prev_event_ids)
        prev_max_depth = yield self.store.get_max_depth_of_events(prev_event_ids)
        depth = prev_max_depth + 1
    else:
        # Otherwise point at the room's current forward extremities.
        latest = yield self.store.get_latest_event_ids_and_hashes_in_room(
            builder.room_id,
        )
        depth = (max(d for _, _, d in latest) + 1) if latest else 1
        prev_events = [(eid, hashes) for eid, hashes, _ in latest]

    builder.prev_events = prev_events
    builder.depth = depth

    state_handler = self.state_handler
    context = yield state_handler.compute_event_context(builder)

    if builder.is_state():
        builder.prev_state = yield self.store.add_event_hashes(
            context.prev_state_events
        )

    yield self.auth.add_auth_events(builder, context)

    signing_key = self.hs.config.signing_key[0]
    add_hashes_and_signatures(builder, self.server_name, signing_key)

    event = builder.build()

    logger.debug(
        "Created event %s with current state: %s",
        event.event_id,
        context.current_state,
    )

    defer.returnValue((event, context))
def _create_new_client_event(self, builder, prev_event_ids=None):
    """Populate prev_events and depth, auth+sign the builder, and build it.

    Returns a deferred firing with ``(event, context)``.
    """
    if prev_event_ids:
        # Explicit prev events supplied by the caller.
        prev_events = yield self.store.add_event_hashes(prev_event_ids)
        max_depth = yield self.store.get_max_depth_of_events(prev_event_ids)
        depth = max_depth + 1
    else:
        # Fall back to the room's current forward extremities.
        extremities = yield self.store.get_latest_event_ids_and_hashes_in_room(
            builder.room_id,
        )
        if extremities:
            depth = max(d for _, _, d in extremities) + 1
        else:
            depth = 1
        prev_events = [
            (event_id, hashes) for event_id, hashes, _ in extremities
        ]

    builder.prev_events = prev_events
    builder.depth = depth

    context = yield self.state_handler.compute_event_context(builder)

    if builder.is_state():
        builder.prev_state = yield self.store.add_event_hashes(
            context.prev_state_events
        )

    yield self.auth.add_auth_events(builder, context)

    add_hashes_and_signatures(
        builder, self.server_name, self.hs.config.signing_key[0]
    )

    event = builder.build()

    logger.debug(
        "Created event %s with state: %s",
        event.event_id,
        context.prev_state_ids,
    )

    defer.returnValue((event, context))
def test_send_join_partial_state(self):
    """When MSC3706 support is enabled, /send_join should return partial state"""
    joining_user = "******" + self.OTHER_SERVER_NAME
    join_event_dict = self._make_join(joining_user)["event"]
    add_hashes_and_signatures(
        KNOWN_ROOM_VERSIONS[DEFAULT_ROOM_VERSION],
        join_event_dict,
        signature_name=self.OTHER_SERVER_NAME,
        signing_key=self.OTHER_SERVER_SIGNATURE_KEY,
    )
    channel = self.make_signed_federation_request(
        "PUT",
        f"/_matrix/federation/v2/send_join/{self._room_id}/x?org.matrix.msc3706.partial_state=true",
        content=join_event_dict,
    )
    self.assertEqual(channel.code, 200, channel.json_body)

    # Only a reduced subset of the room state should be returned.
    state = [
        (ev["type"], ev["state_key"]) for ev in channel.json_body["state"]
    ]
    self.assertCountEqual(
        state,
        [
            ("m.room.create", ""),
            ("m.room.power_levels", ""),
            ("m.room.join_rules", ""),
            ("m.room.history_visibility", ""),
        ],
    )

    # The auth chain must not repeat anything already present in "state".
    auth_chain = [
        (ev["type"], ev["state_key"])
        for ev in channel.json_body["auth_chain"]
    ]
    self.assertCountEqual(auth_chain, [("m.room.member", "@kermit:test")])

    # The new user should now be a joined member of the room.
    current_state = self.get_success(
        self._storage_controllers.state.get_current_state(self._room_id)
    )
    self.assertEqual(
        current_state[("m.room.member", joining_user)].membership, "join"
    )
def add_hashes_and_signatures(
    self,
    event_dict: JsonDict,
    room_version: RoomVersion = KNOWN_ROOM_VERSIONS[DEFAULT_ROOM_VERSION],
) -> JsonDict:
    """Hash and sign ``event_dict`` as this test's "other" server.

    The dict is mutated in place.

    Returns:
        The same event dict, for call-chaining convenience.
    """
    add_hashes_and_signatures(
        room_version,
        event_dict,
        signature_name=self.OTHER_SERVER_NAME,
        signing_key=self.OTHER_SERVER_SIGNATURE_KEY,
    )
    return event_dict
def _create_new_client_event(self, builder):
    """Point the builder at the room's latest events, auth+sign it, build it.

    Returns a deferred firing with ``(event, context)``.
    """
    yield run_on_reactor()

    latest = yield self.store.get_latest_events_in_room(builder.room_id)

    # Depth is one past the deepest of the current latest events.
    depth = (max(d for _, _, d in latest) + 1) if latest else 1

    builder.prev_events = [(e, h) for e, h, _ in latest]
    builder.depth = depth

    state_handler = self.state_handler
    context = yield state_handler.compute_event_context(builder)

    if builder.is_state():
        builder.prev_state = context.prev_state_events

    yield self.auth.add_auth_events(builder, context)

    add_hashes_and_signatures(builder, self.server_name, self.signing_key)

    event = builder.build()

    logger.debug(
        "Created event %s with auth_events: %s, current state: %s",
        event.event_id,
        context.auth_events,
        context.current_state,
    )

    defer.returnValue((event, context))
def _create_new_client_event(self, builder):
    """Fill out prev_events/depth, compute state context, sign, and build.

    Returns a deferred firing with ``(event, context)``.
    """
    yield run_on_reactor()

    room_latest = yield self.store.get_latest_events_in_room(
        builder.room_id,
    )

    if room_latest:
        new_depth = max(d for _, _, d in room_latest) + 1
    else:
        new_depth = 1

    builder.prev_events = [
        (event_id, hashes) for event_id, hashes, _ in room_latest
    ]
    builder.depth = new_depth

    context = yield self.state_handler.compute_event_context(builder)

    if builder.is_state():
        builder.prev_state = context.prev_state_events

    yield self.auth.add_auth_events(builder, context)

    add_hashes_and_signatures(builder, self.server_name, self.signing_key)

    event = builder.build()

    logger.debug(
        "Created event %s with auth_events: %s, current state: %s",
        event.event_id,
        context.auth_events,
        context.current_state,
    )

    defer.returnValue((event, context))
def create_local_event_from_event_dict(
    clock: Clock,
    hostname: str,
    signing_key: SigningKey,
    room_version: RoomVersion,
    event_dict: JsonDict,
    internal_metadata_dict: Optional[JsonDict] = None,
) -> EventBase:
    """Build, hash and sign a locally produced event from a full event dict.

    Fields such as `origin` and `origin_server_ts` are given correct values
    for a locally produced event before the dict is hashed and signed.
    """
    format_version = room_version.event_format
    if format_version not in KNOWN_EVENT_FORMAT_VERSIONS:
        raise Exception(
            "No event format defined for version %r" % (format_version,)
        )

    if internal_metadata_dict is None:
        internal_metadata_dict = {}

    time_now = int(clock.time_msec())

    # Only v1-format events carry a server-generated event_id.
    if format_version == EventFormatVersions.V1:
        event_dict["event_id"] = _create_event_id(clock, hostname)

    event_dict["origin"] = hostname
    event_dict.setdefault("origin_server_ts", time_now)

    event_dict.setdefault("unsigned", {})
    age = event_dict["unsigned"].pop("age", 0)
    event_dict["unsigned"].setdefault("age_ts", time_now - age)

    event_dict.setdefault("signatures", {})

    add_hashes_and_signatures(room_version, event_dict, hostname, signing_key)

    return make_event_from_dict(
        event_dict, room_version, internal_metadata_dict=internal_metadata_dict
    )
def _create_new_client_event(self, builder):
    """Fill out prev_events/depth on ``builder``, compute its state context,
    auth it, sign it, and build the event.

    Returns a deferred which fires with ``(event, context)``.
    """
    latest_ret = yield self.store.get_latest_event_ids_and_hashes_in_room(
        builder.room_id,
    )

    # New event sits one past the deepest of the current forward extremities.
    if latest_ret:
        depth = max([d for _, _, d in latest_ret]) + 1
    else:
        depth = 1

    prev_events = [
        (event_id, prev_hashes)
        for event_id, prev_hashes, _ in latest_ret
    ]

    builder.prev_events = prev_events
    builder.depth = depth

    state_handler = self.state_handler
    context = yield state_handler.compute_event_context(builder)

    # If we've received an invite over federation, there are no latest
    # events in the room, because we don't know enough about the graph
    # fragment we received to treat it like a graph, so the above returned
    # no relevant events. It may have returned some events (if we have
    # joined and left the room), but not useful ones, like the invite.
    if (not self.is_host_in_room(context.current_state)
            and builder.type == EventTypes.Member):
        prev_member_event = yield self.store.get_room_member(
            builder.sender, builder.room_id)

        # The prev_member_event may already be in context.current_state,
        # despite us not being present in the room; in particular, if
        # inviting user, and all other local users, have already left.
        #
        # In that case, we have all the information we need, and we don't
        # want to drop "context" - not least because we may need to handle
        # the invite locally, which will require us to have the whole
        # context (not just prev_member_event) to auth it.
        #
        context_event_ids = (
            e.event_id for e in context.current_state.values()
        )

        if (prev_member_event
                and prev_member_event.event_id not in context_event_ids):
            # The prev_member_event is missing from context, so it must
            # have arrived over federation and is an outlier. We forcibly
            # set our context to the invite we received over federation.
            builder.prev_events = (
                prev_member_event.event_id,
                prev_member_event.prev_events,
            )

            context = yield state_handler.compute_event_context(
                builder, old_state=(prev_member_event,), outlier=True)

    if builder.is_state():
        builder.prev_state = yield self.store.add_event_hashes(
            context.prev_state_events)

    yield self.auth.add_auth_events(builder, context)

    add_hashes_and_signatures(builder, self.server_name, self.signing_key)

    event = builder.build()

    logger.debug(
        "Created event %s with current state: %s",
        event.event_id,
        context.current_state,
    )

    defer.returnValue((
        event,
        context,
    ))
def _create_new_client_event(self, builder, prev_event_ids=None):
    """Choose prev_events (capped at 10), auth+sign the builder, build it.

    Returns a deferred firing with ``(event, context)``.
    """
    if prev_event_ids:
        # Explicit prev events supplied by the caller.
        prev_events = yield self.store.add_event_hashes(prev_event_ids)
        prev_max_depth = yield self.store.get_max_depth_of_events(
            prev_event_ids)
        depth = prev_max_depth + 1
    else:
        latest_ret = yield self.store.get_latest_event_ids_and_hashes_in_room(
            builder.room_id,
        )

        # We want to limit the max number of prev events we point to in our
        # new event
        if len(latest_ret) > 10:
            # Sort by reverse depth, so we point to the most recent.
            latest_ret.sort(key=lambda a: -a[2])
            picked = latest_ret[:5]

            # We also randomly point to some of the older events, to make
            # sure that we don't completely ignore the older events.
            older = latest_ret[5:]
            if older:
                picked.extend(random.sample(older, min(5, len(older))))

            latest_ret = picked

        depth = (max(d for _, _, d in latest_ret) + 1) if latest_ret else 1
        prev_events = [
            (event_id, hashes) for event_id, hashes, _ in latest_ret
        ]

    builder.prev_events = prev_events
    builder.depth = depth

    context = yield self.state_handler.compute_event_context(builder)

    if builder.is_state():
        builder.prev_state = yield self.store.add_event_hashes(
            context.prev_state_events)

    yield self.auth.add_auth_events(builder, context)

    add_hashes_and_signatures(
        builder, self.server_name, self.hs.config.signing_key[0]
    )

    event = builder.build()

    logger.debug(
        "Created event %s with state: %s",
        event.event_id,
        context.prev_state_ids,
    )

    defer.returnValue((
        event,
        context,
    ))
def do_invite_join(self, target_hosts, room_id, joinee, content, snapshot):
    """ Attempts to join the `joinee` to the room `room_id` via the
    server `target_host`.

    This first triggers a /make_join/ request that returns a partial
    event that we can fill out and sign. This is then sent to the
    remote server via /send_join/ which responds with the state at that
    event and the auth_chains.

    We suspend processing of any received events from this room until we
    have finished processing the join.
    """
    logger.debug("Joining %s to %s", joinee, room_id)

    yield self.store.clean_room_for_join(room_id)

    origin, pdu = yield self.replication_layer.make_join(
        target_hosts, room_id, joinee
    )

    logger.debug("Got response to make_join: %s", pdu)

    event = pdu

    # We should assert some things.
    # FIXME: Do this in a nicer way
    assert (event.type == EventTypes.Member)
    assert (event.user_id == joinee)
    assert (event.state_key == joinee)
    assert (event.room_id == room_id)

    event.internal_metadata.outlier = False

    self.room_queues[room_id] = []

    builder = self.event_builder_factory.new(unfreeze(event.get_pdu_json()))

    handled_events = set()

    try:
        builder.event_id = self.event_builder_factory.create_event_id()
        builder.origin = self.hs.hostname
        builder.content = content

        if not hasattr(event, "signatures"):
            builder.signatures = {}

        add_hashes_and_signatures(
            builder, self.hs.hostname, self.hs.config.signing_key[0],
        )

        new_event = builder.build()

        # Try the host we successfully got a response to /make_join/
        # request first.
        try:
            target_hosts.remove(origin)
            target_hosts.insert(0, origin)
        except ValueError:
            pass

        ret = yield self.replication_layer.send_join(target_hosts, new_event)

        origin = ret["origin"]
        state = ret["state"]
        auth_chain = ret["auth_chain"]
        auth_chain.sort(key=lambda e: e.depth)

        handled_events.update([s.event_id for s in state])
        handled_events.update([a.event_id for a in auth_chain])
        handled_events.add(new_event.event_id)

        logger.debug("do_invite_join auth_chain: %s", auth_chain)
        logger.debug("do_invite_join state: %s", state)
        logger.debug("do_invite_join event: %s", new_event)

        try:
            yield self.store.store_room(
                room_id=room_id, room_creator_user_id="", is_public=False
            )
        except Exception:
            # Fix: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit. Still deliberately best-effort.
            # FIXME
            pass

        ev_infos = []
        for e in itertools.chain(state, auth_chain):
            if e.event_id == event.event_id:
                continue

            e.internal_metadata.outlier = True
            auth_ids = [e_id for e_id, _ in e.auth_events]
            ev_infos.append({
                "event": e,
                "auth_events": {
                    (e.type, e.state_key): e
                    for e in auth_chain
                    if e.event_id in auth_ids
                },
            })

        yield self._handle_new_events(origin, ev_infos, outliers=True)

        auth_ids = [e_id for e_id, _ in event.auth_events]
        auth_events = {
            (e.type, e.state_key): e
            for e in auth_chain
            if e.event_id in auth_ids
        }

        _, event_stream_id, max_stream_id = yield self._handle_new_event(
            origin,
            new_event,
            state=state,
            current_state=state,
            auth_events=auth_events,
        )

        with PreserveLoggingContext():
            d = self.notifier.on_new_room_event(
                new_event, event_stream_id, max_stream_id,
                extra_users=[joinee]
            )

        def log_failure(f):
            # Fix: logger.warn is a deprecated alias of logger.warning.
            logger.warning(
                "Failed to notify about %s: %s", new_event.event_id, f.value
            )

        d.addErrback(log_failure)

        logger.debug("Finished joining %s to %s", joinee, room_id)
    finally:
        room_queue = self.room_queues[room_id]
        del self.room_queues[room_id]

        for p, origin in room_queue:
            if p.event_id in handled_events:
                continue

            try:
                self.on_receive_pdu(origin, p, backfilled=False)
            except Exception:
                # Fix: narrowed from a bare `except:`.
                logger.exception("Couldn't handle pdu")

    defer.returnValue(True)
def create_new_client_event(self, builder, requester=None,
                            prev_events_and_hashes=None):
    """Create a new event for a local client

    Args:
        builder (EventBuilder):
        requester (synapse.types.Requester|None):
        prev_events_and_hashes (list[(str, dict[str, str], int)]|None):
            the forward extremities to use as the prev_events for the
            new event. For each event, a tuple of (event_id, hashes,
            depth) where *hashes* is a map from algorithm to hash.

            If None, they will be requested from the database.

    Returns:
        Deferred[(synapse.events.EventBase, synapse.events.snapshot.EventContext)]
    """
    if prev_events_and_hashes is None:
        prev_events_and_hashes = yield self.store.get_prev_events_for_room(
            builder.room_id
        )
    else:
        assert len(prev_events_and_hashes) <= 10, (
            "Attempting to create an event with %i prev_events"
            % (len(prev_events_and_hashes),)
        )

    if prev_events_and_hashes:
        # we cap depth of generated events, to ensure that they are not
        # rejected by other servers (and so that they can be persisted in
        # the db)
        depth = min(
            max(d for _, _, d in prev_events_and_hashes) + 1, MAX_DEPTH
        )
    else:
        depth = 1

    builder.prev_events = [
        (event_id, hashes) for event_id, hashes, _ in prev_events_and_hashes
    ]
    builder.depth = depth

    context = yield self.state.compute_event_context(builder)
    if requester:
        context.app_service = requester.app_service

    if builder.is_state():
        builder.prev_state = yield self.store.add_event_hashes(
            context.prev_state_events
        )

    yield self.auth.add_auth_events(builder, context)

    add_hashes_and_signatures(
        builder, self.server_name, self.hs.config.signing_key[0]
    )

    event = builder.build()

    logger.debug("Created event %s", event.event_id)

    defer.returnValue((event, context))
def do_invite_join(self, target_hosts, room_id, joinee, content, snapshot):
    """ Attempts to join the `joinee` to the room `room_id` via the
    server `target_host`.

    This first triggers a /make_join/ request that returns a partial
    event that we can fill out and sign. This is then sent to the
    remote server via /send_join/ which responds with the state at that
    event and the auth_chains.

    We suspend processing of any received events from this room until we
    have finished processing the join.
    """
    logger.debug("Joining %s to %s", joinee, room_id)

    yield self.store.clean_room_for_join(room_id)

    origin, pdu = yield self.replication_layer.make_join(
        target_hosts, room_id, joinee
    )

    logger.debug("Got response to make_join: %s", pdu)

    event = pdu

    # We should assert some things.
    # FIXME: Do this in a nicer way
    assert(event.type == EventTypes.Member)
    assert(event.user_id == joinee)
    assert(event.state_key == joinee)
    assert(event.room_id == room_id)

    event.internal_metadata.outlier = False

    self.room_queues[room_id] = []

    builder = self.event_builder_factory.new(
        unfreeze(event.get_pdu_json())
    )

    handled_events = set()

    try:
        builder.event_id = self.event_builder_factory.create_event_id()
        builder.origin = self.hs.hostname
        builder.content = content

        if not hasattr(event, "signatures"):
            builder.signatures = {}

        add_hashes_and_signatures(
            builder, self.hs.hostname, self.hs.config.signing_key[0],
        )

        new_event = builder.build()

        # Try the host we successfully got a response to /make_join/
        # request first.
        try:
            target_hosts.remove(origin)
            target_hosts.insert(0, origin)
        except ValueError:
            pass

        ret = yield self.replication_layer.send_join(
            target_hosts, new_event
        )

        origin = ret["origin"]
        state = ret["state"]
        auth_chain = ret["auth_chain"]
        auth_chain.sort(key=lambda e: e.depth)

        handled_events.update([s.event_id for s in state])
        handled_events.update([a.event_id for a in auth_chain])
        handled_events.add(new_event.event_id)

        logger.debug("do_invite_join auth_chain: %s", auth_chain)
        logger.debug("do_invite_join state: %s", state)
        logger.debug("do_invite_join event: %s", new_event)

        try:
            yield self.store.store_room(
                room_id=room_id, room_creator_user_id="", is_public=False
            )
        except Exception:
            # Fix: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit. Still deliberately best-effort.
            # FIXME
            pass

        for e in auth_chain:
            e.internal_metadata.outlier = True

            if e.event_id == event.event_id:
                continue

            try:
                auth_ids = [e_id for e_id, _ in e.auth_events]
                auth = {
                    (e.type, e.state_key): e
                    for e in auth_chain
                    if e.event_id in auth_ids
                }
                yield self._handle_new_event(origin, e, auth_events=auth)
            except Exception:
                # Fix: narrowed from a bare `except:`.
                logger.exception(
                    "Failed to handle auth event %s", e.event_id,
                )

        for e in state:
            if e.event_id == event.event_id:
                continue

            e.internal_metadata.outlier = True
            try:
                auth_ids = [e_id for e_id, _ in e.auth_events]
                auth = {
                    (e.type, e.state_key): e
                    for e in auth_chain
                    if e.event_id in auth_ids
                }
                yield self._handle_new_event(origin, e, auth_events=auth)
            except Exception:
                # Fix: narrowed from a bare `except:`.
                logger.exception(
                    "Failed to handle state event %s", e.event_id,
                )

        auth_ids = [e_id for e_id, _ in event.auth_events]
        auth_events = {
            (e.type, e.state_key): e
            for e in auth_chain
            if e.event_id in auth_ids
        }

        yield self._handle_new_event(
            origin,
            new_event,
            state=state,
            current_state=state,
            auth_events=auth_events,
        )

        yield self.notifier.on_new_room_event(new_event, extra_users=[joinee])

        logger.debug("Finished joining %s to %s", joinee, room_id)
    finally:
        room_queue = self.room_queues[room_id]
        del self.room_queues[room_id]

        for p, origin in room_queue:
            if p.event_id in handled_events:
                continue

            try:
                self.on_receive_pdu(origin, p, backfilled=False)
            except Exception:
                # Fix: narrowed from a bare `except:`.
                logger.exception("Couldn't handle pdu")

    defer.returnValue(True)
def do_invite_join(self, target_host, room_id, joinee, content, snapshot):
    """ Attempts to join the `joinee` to the room `room_id` via the
    server `target_host`.

    This first triggers a /make_join/ request that returns a partial
    event that we can fill out and sign. This is then sent to the
    remote server via /send_join/ which responds with the state at that
    event and the auth_chains.

    We suspend processing of any received events from this room until we
    have finished processing the join.
    """
    logger.debug("Joining %s to %s", joinee, room_id)

    pdu = yield self.replication_layer.make_join(
        target_host, room_id, joinee
    )

    logger.debug("Got response to make_join: %s", pdu)

    event = pdu

    # We should assert some things.
    assert(event.type == RoomMemberEvent.TYPE)
    assert(event.user_id == joinee)
    assert(event.state_key == joinee)
    assert(event.room_id == room_id)

    event.outlier = False

    self.room_queues[room_id] = []

    try:
        event.event_id = self.event_factory.create_event_id()
        event.origin = self.hs.hostname
        event.content = content

        if not hasattr(event, "signatures"):
            event.signatures = {}

        add_hashes_and_signatures(
            event, self.hs.hostname, self.hs.config.signing_key[0],
        )

        ret = yield self.replication_layer.send_join(target_host, event)

        state = ret["state"]
        auth_chain = ret["auth_chain"]
        auth_chain.sort(key=lambda e: e.depth)

        logger.debug("do_invite_join auth_chain: %s", auth_chain)
        logger.debug("do_invite_join state: %s", state)
        logger.debug("do_invite_join event: %s", event)

        try:
            yield self.store.store_room(
                room_id=room_id, room_creator_user_id="", is_public=False
            )
        except Exception:
            # Fix: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit. Still deliberately best-effort.
            # FIXME
            pass

        for e in auth_chain:
            e.outlier = True
            try:
                yield self._handle_new_event(e, fetch_missing=False)
            except Exception:
                # Fix: narrowed from a bare `except:`.
                logger.exception(
                    "Failed to parse auth event %s", e.event_id,
                )

        for e in state:
            # FIXME: Auth these.
            e.outlier = True
            try:
                yield self._handle_new_event(e, fetch_missing=True)
            except Exception:
                # Fix: narrowed from a bare `except:`.
                logger.exception(
                    "Failed to parse state event %s", e.event_id,
                )

        yield self._handle_new_event(
            event,
            state=state,
            current_state=state,
        )

        yield self.notifier.on_new_room_event(event, extra_users=[joinee])

        logger.debug("Finished joining %s to %s", joinee, room_id)
    finally:
        room_queue = self.room_queues[room_id]
        del self.room_queues[room_id]

        for p, origin in room_queue:
            try:
                self.on_receive_pdu(origin, p, backfilled=False)
            except Exception:
                # Fix: narrowed from a bare `except:`.
                logger.exception("Couldn't handle pdu")

    defer.returnValue(True)
def main() -> None:
    """CLI entry point: read a JSON object, sign it with a local ed25519
    signing key, and write the signed JSON to the chosen output.

    Server name and signing key come from the command line, or fall back
    to the synapse config file. With --sign-event-room-version the input
    is treated as an event (hashed and redacted before signing) rather
    than raw JSON. Exits with status 1 on bad input.
    """
    parser = argparse.ArgumentParser(
        description="""Adds a signature to a JSON object.

Example usage:

    $ scripts-dev/sign_json.py -N test -k localhost.signing.key "{}"
    {"signatures":{"test":{"ed25519:a_ZnZh":"LmPnml6iM0iR..."}}}
""",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    parser.add_argument(
        "-N",
        "--server-name",
        help="Name to give as the local homeserver. If unspecified, will be "
        "read from the config file.",
    )

    parser.add_argument(
        "-k",
        "--signing-key-path",
        help="Path to the file containing the private ed25519 key to sign the "
        "request with.",
    )

    parser.add_argument(
        "-K",
        "--signing-key",
        help="The private ed25519 key to sign the request with.",
    )

    parser.add_argument(
        "-c",
        "--config",
        default="homeserver.yaml",
        help=(
            "Path to synapse config file, from which the server name and/or signing "
            "key path will be read. Ignored if --server-name and --signing-key(-path) "
            "are both given."
        ),
    )

    parser.add_argument(
        "--sign-event-room-version",
        type=str,
        help=(
            "Sign the JSON as an event for the given room version, rather than raw JSON. "
            "This means that we will add a 'hashes' object, and redact the event before "
            "signing."
        ),
    )

    # Exactly one source of input: positional data or --input (default stdin).
    input_args = parser.add_mutually_exclusive_group()
    input_args.add_argument("input_data", nargs="?", help="Raw JSON to be signed.")
    input_args.add_argument(
        "-i",
        "--input",
        type=argparse.FileType("r"),
        default=sys.stdin,
        help=(
            "A file from which to read the JSON to be signed. If neither --input nor "
            "input_data are given, JSON will be read from stdin."
        ),
    )

    parser.add_argument(
        "-o",
        "--output",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="Where to write the signed JSON. Defaults to stdout.",
    )

    args = parser.parse_args()

    # Fall back to the config file for anything not given on the command line.
    if not args.server_name or not (args.signing_key_path or args.signing_key):
        read_args_from_config(args)

    if args.signing_key:
        keys = read_signing_keys([args.signing_key])
    else:
        with open(args.signing_key_path) as f:
            keys = read_signing_keys(f)

    json_to_sign = args.input_data
    if json_to_sign is None:
        json_to_sign = args.input.read()

    try:
        obj = json.loads(json_to_sign)
    except JSONDecodeError as e:
        print("Unable to parse input as JSON: %s" % e, file=sys.stderr)
        sys.exit(1)

    if not isinstance(obj, dict):
        print("Input json was not an object", file=sys.stderr)
        sys.exit(1)

    if args.sign_event_room_version:
        room_version = KNOWN_ROOM_VERSIONS.get(args.sign_event_room_version)
        if not room_version:
            print(f"Unknown room version {args.sign_event_room_version}", file=sys.stderr)
            sys.exit(1)
        # Event signing: adds a 'hashes' object and signs the redacted event.
        add_hashes_and_signatures(room_version, obj, args.server_name, keys[0])
    else:
        sign_json(obj, args.server_name, keys[0])

    for c in json_encoder.iterencode(obj):
        args.output.write(c)
    args.output.write("\n")
def do_invite_join(self, target_host, room_id, joinee, content, snapshot):
    """ Attempts to join the `joinee` to the room `room_id` via the
    server `target_host`.

    This first triggers a /make_join/ request that returns a partial
    event that we can fill out and sign. This is then sent to the
    remote server via /send_join/ which responds with the state at that
    event and the auth_chains.

    We suspend processing of any received events from this room until we
    have finished processing the join.
    """
    logger.debug("Joining %s to %s", joinee, room_id)

    pdu = yield self.replication_layer.make_join(target_host, room_id, joinee)

    logger.debug("Got response to make_join: %s", pdu)

    event = pdu

    # We should assert some things.
    # FIXME: Do this in a nicer way
    assert (event.type == EventTypes.Member)
    assert (event.user_id == joinee)
    assert (event.state_key == joinee)
    assert (event.room_id == room_id)

    event.internal_metadata.outlier = False

    # Queue up any PDUs received for this room while the join is in
    # flight; they are replayed in the finally block below.
    self.room_queues[room_id] = []

    builder = self.event_builder_factory.new(event.get_pdu_json())

    # Events we process as part of the join, so the replay loop below
    # can skip any queued duplicates.
    handled_events = set()

    try:
        builder.event_id = self.event_builder_factory.create_event_id()
        builder.origin = self.hs.hostname
        builder.content = content

        # NOTE(review): this checks `event` for signatures but assigns
        # on `builder` — looks like it should check `builder`; confirm
        # before changing, since builder was seeded from event's pdu json.
        if not hasattr(event, "signatures"):
            builder.signatures = {}

        add_hashes_and_signatures(
            builder,
            self.hs.hostname,
            self.hs.config.signing_key[0],
        )

        new_event = builder.build()

        ret = yield self.replication_layer.send_join(
            target_host,
            new_event
        )

        state = ret["state"]
        auth_chain = ret["auth_chain"]
        # Process shallower (earlier) auth events first.
        auth_chain.sort(key=lambda e: e.depth)

        handled_events.update([s.event_id for s in state])
        handled_events.update([a.event_id for a in auth_chain])
        handled_events.add(new_event.event_id)

        logger.debug("do_invite_join auth_chain: %s", auth_chain)
        logger.debug("do_invite_join state: %s", state)

        logger.debug("do_invite_join event: %s", new_event)

        try:
            yield self.store.store_room(
                room_id=room_id,
                room_creator_user_id="",
                is_public=False
            )
        # Best-effort: the room row may already exist. Was a bare
        # `except:`, which would also swallow GeneratorExit /
        # KeyboardInterrupt inside this generator.
        except Exception:
            # FIXME
            pass

        for e in auth_chain:
            e.internal_metadata.outlier = True

            try:
                yield self._handle_new_event(e)
            except Exception:  # narrowed from bare except
                logger.exception(
                    "Failed to handle auth event %s",
                    e.event_id,
                )

        for e in state:
            # FIXME: Auth these.
            e.internal_metadata.outlier = True
            try:
                yield self._handle_new_event(
                    e,
                    fetch_auth_from=target_host
                )
            except Exception:  # narrowed from bare except
                logger.exception(
                    "Failed to handle state event %s",
                    e.event_id,
                )

        yield self._handle_new_event(
            new_event,
            state=state,
            current_state=state,
        )

        yield self.notifier.on_new_room_event(
            new_event, extra_users=[joinee]
        )

        logger.debug("Finished joining %s to %s", joinee, room_id)
    finally:
        # Always drain and replay the queued PDUs, even if the join
        # failed, so the room is not left permanently suspended.
        room_queue = self.room_queues[room_id]
        del self.room_queues[room_id]

        for p, origin in room_queue:
            if p.event_id in handled_events:
                continue

            try:
                self.on_receive_pdu(origin, p, backfilled=False)
            except Exception:  # narrowed from bare except
                logger.exception("Couldn't handle pdu")

    defer.returnValue(True)
def create_new_client_event(self, builder, requester=None,
                            prev_events_and_hashes=None):
    """Create a new event for a local client

    Args:
        builder (EventBuilder):

        requester (synapse.types.Requester|None):

        prev_events_and_hashes (list[(str, dict[str, str], int)]|None):
            the forward extremities to use as the prev_events for the
            new event. For each event, a tuple of (event_id, hashes,
            depth) where *hashes* is a map from algorithm to hash.

            If None, they will be requested from the database.

    Returns:
        Deferred[(synapse.events.EventBase, synapse.events.snapshot.EventContext)]
    """
    if prev_events_and_hashes is None:
        # No extremities supplied by the caller: look them up.
        prev_events_and_hashes = yield self.store.get_prev_events_for_room(
            builder.room_id,
        )
    else:
        # Sanity-check a caller-supplied extremity list.
        assert len(prev_events_and_hashes) <= 10, (
            "Attempting to create an event with %i prev_events" % (
                len(prev_events_and_hashes),
            )
        )

    # The new event sits one step deeper than the deepest extremity.
    depth = 1
    if prev_events_and_hashes:
        depth = max(d for _, _, d in prev_events_and_hashes) + 1

    builder.prev_events = [
        (event_id, hashes)
        for event_id, hashes, _ in prev_events_and_hashes
    ]
    builder.depth = depth

    context = yield self.state.compute_event_context(builder)
    if requester:
        context.app_service = requester.app_service

    if builder.is_state():
        builder.prev_state = yield self.store.add_event_hashes(
            context.prev_state_events,
        )

    yield self.auth.add_auth_events(builder, context)

    add_hashes_and_signatures(
        builder, self.server_name, self.hs.config.signing_key[0],
    )

    event = builder.build()

    logger.debug(
        "Created event %s with state: %s",
        event.event_id, context.prev_state_ids,
    )

    defer.returnValue((event, context))
def _create_new_client_event(self, builder):
    """Fill out *builder* (prev_events, depth, state, auth events,
    hashes/signatures) and build it into an event.

    Returns:
        Deferred resolving to an (event, context) tuple.
    """
    # Current forward extremities: tuples of (event_id, hashes, depth).
    latest_ret = yield self.store.get_latest_event_ids_and_hashes_in_room(
        builder.room_id,
    )

    # New event sits one step deeper than the deepest extremity.
    if latest_ret:
        depth = max([d for _, _, d in latest_ret]) + 1
    else:
        depth = 1

    prev_events = [
        (event_id, prev_hashes)
        for event_id, prev_hashes, _ in latest_ret
    ]

    builder.prev_events = prev_events
    builder.depth = depth

    state_handler = self.state_handler

    context = yield state_handler.compute_event_context(builder)

    # If we've received an invite over federation, there are no latest
    # events in the room, because we don't know enough about the graph
    # fragment we received to treat it like a graph, so the above returned
    # no relevant events. It may have returned some events (if we have
    # joined and left the room), but not useful ones, like the invite.
    if (
        not self.is_host_in_room(context.current_state) and
        builder.type == EventTypes.Member
    ):
        prev_member_event = yield self.store.get_room_member(
            builder.sender, builder.room_id
        )

        # The prev_member_event may already be in context.current_state,
        # despite us not being present in the room; in particular, if
        # inviting user, and all other local users, have already left.
        #
        # In that case, we have all the information we need, and we don't
        # want to drop "context" - not least because we may need to handle
        # the invite locally, which will require us to have the whole
        # context (not just prev_member_event) to auth it.
        #
        # NOTE(review): this is a one-shot generator, consumed by the
        # single `not in` test below — fine as written, but do not reuse.
        context_event_ids = (
            e.event_id for e in context.current_state.values()
        )

        if (
            prev_member_event and
            prev_member_event.event_id not in context_event_ids
        ):
            # The prev_member_event is missing from context, so it must
            # have arrived over federation and is an outlier. We forcibly
            # set our context to the invite we received over federation
            #
            # NOTE(review): here prev_events becomes a bare
            # (event_id, prev_events) 2-tuple, unlike the list of
            # (event_id, hashes) pairs assigned above — confirm the
            # builder accepts this shape.
            builder.prev_events = (
                prev_member_event.event_id,
                prev_member_event.prev_events
            )

            context = yield state_handler.compute_event_context(
                builder,
                old_state=(prev_member_event,),
                outlier=True
            )

    if builder.is_state():
        builder.prev_state = yield self.store.add_event_hashes(
            context.prev_state_events
        )

    yield self.auth.add_auth_events(builder, context)

    # Sign the event before building it; signing mutates the builder.
    add_hashes_and_signatures(
        builder, self.server_name, self.signing_key
    )

    event = builder.build()

    logger.debug(
        "Created event %s with current state: %s",
        event.event_id, context.current_state,
    )

    defer.returnValue(
        (event, context,)
    )