def _get_event_from_row(self, internal_metadata, js, redacted,
                        check_redacted=True, get_prev_content=False,
                        rejected_reason=None):
    """Build a FrozenEvent from a row of the event_json table, applying
    redaction/rejection metadata, and prime the event cache with it.

    NOTE(review): the `yield`/`defer.returnValue` pattern implies this is
    decorated with @defer.inlineCallbacks — confirm at the definition site.

    Args:
        internal_metadata (str): JSON-encoded internal metadata dict.
        js (str): JSON-encoded event body.
        redacted: truthy if the event has been redacted.
        check_redacted (bool): if True and `redacted` is set, return the
            pruned event with redaction info in `unsigned`.
        get_prev_content (bool): if True, attach the replaced state event's
            content/sender to `unsigned`.
        rejected_reason: event_id of the rejection to look up, or falsy.

    Returns (via Deferred):
        FrozenEvent
    """
    d = json.loads(js)
    internal_metadata = json.loads(internal_metadata)

    # `rejected_reason` arrives as an event_id key into the rejections
    # table; swap it for the human-readable reason string.
    if rejected_reason:
        rejected_reason = yield self._simple_select_one_onecol(
            table="rejections",
            keyvalues={"event_id": rejected_reason},
            retcol="reason",
            desc="_get_event_from_row",
        )

    ev = FrozenEvent(
        d,
        internal_metadata_dict=internal_metadata,
        rejected_reason=rejected_reason,
    )

    if check_redacted and redacted:
        # Strip the event down to the fields the spec allows to survive
        # redaction, then record who redacted it.
        ev = prune_event(ev)

        redaction_id = yield self._simple_select_one_onecol(
            table="redactions",
            keyvalues={"redacts": ev.event_id},
            retcol="event_id",
            desc="_get_event_from_row",
        )

        ev.unsigned["redacted_by"] = redaction_id
        # Get the redaction event.

        because = yield self.get_event(
            redaction_id,
            check_redacted=False,
            allow_none=True,
        )

        if because:
            # It's fine to do add the event directly, since get_pdu_json
            # will serialise this field correctly
            ev.unsigned["redacted_because"] = because

    # For state events, optionally attach the content of the state entry
    # this event replaced.
    if get_prev_content and "replaces_state" in ev.unsigned:
        prev = yield self.get_event(
            ev.unsigned["replaces_state"],
            get_prev_content=False,
            allow_none=True,
        )
        if prev:
            ev.unsigned["prev_content"] = prev.content
            ev.unsigned["prev_sender"] = prev.sender

    # Cache key includes the flags, since they change the returned object.
    self._get_event_cache.prefill(
        (ev.event_id, check_redacted, get_prev_content), ev
    )

    defer.returnValue(ev)
def _get_event_from_row_txn(self, txn, internal_metadata, js, redacted,
                            check_redacted=True, get_prev_content=False,
                            rejected_reason=None):
    """Synchronous, in-transaction counterpart of _get_event_from_row:
    build a FrozenEvent from a DB row using the supplied cursor `txn`,
    and prime the event cache.

    Args:
        txn: database cursor for the ongoing transaction.
        internal_metadata (str): JSON-encoded internal metadata dict.
        js (str): JSON-encoded event body.
        redacted: truthy if the event has been redacted.
        check_redacted (bool): prune and annotate redacted events.
        get_prev_content (bool): attach replaced state content/sender.
        rejected_reason: event_id key into the rejections table, or falsy.

    Returns:
        FrozenEvent
    """
    d = json.loads(js)
    internal_metadata = json.loads(internal_metadata)

    # Resolve the rejection event_id to its stored reason string.
    if rejected_reason:
        rejected_reason = self._simple_select_one_onecol_txn(
            txn,
            table="rejections",
            keyvalues={"event_id": rejected_reason},
            retcol="reason",
        )

    ev = FrozenEvent(
        d,
        internal_metadata_dict=internal_metadata,
        rejected_reason=rejected_reason,
    )

    if check_redacted and redacted:
        ev = prune_event(ev)

        redaction_id = self._simple_select_one_onecol_txn(
            txn,
            table="redactions",
            keyvalues={"redacts": ev.event_id},
            retcol="event_id",
        )

        ev.unsigned["redacted_by"] = redaction_id
        # Get the redaction event.

        because = self._get_event_txn(
            txn,
            redaction_id,
            check_redacted=False
        )

        if because:
            ev.unsigned["redacted_because"] = because

    if get_prev_content and "replaces_state" in ev.unsigned:
        prev = self._get_event_txn(
            txn,
            ev.unsigned["replaces_state"],
            get_prev_content=False,
        )
        if prev:
            ev.unsigned["prev_content"] = prev.content
            ev.unsigned["prev_sender"] = prev.sender

    # Cache key includes the flags, since they change the returned object.
    self._get_event_cache.prefill(
        (ev.event_id, check_redacted, get_prev_content), ev
    )

    return ev
def _get_event_from_row(self, internal_metadata, js, redacted,
                        check_redacted=True, get_prev_content=False,
                        rejected_reason=None):
    """Build a FrozenEvent from an event_json row (older variant).

    Differs from the sibling implementations in that `get_event` is called
    without allow_none, `prev_content` is read via get_dict(), and the
    cache prefill takes positional arguments rather than a key tuple —
    presumably matching an older cache API; confirm against the cache
    class before changing.

    NOTE(review): the `yield`/`defer.returnValue` pattern implies an
    @defer.inlineCallbacks decorator at the definition site.
    """
    d = json.loads(js)
    internal_metadata = json.loads(internal_metadata)

    # Resolve the rejection event_id to its stored reason string.
    if rejected_reason:
        rejected_reason = yield self._simple_select_one_onecol(
            table="rejections",
            keyvalues={"event_id": rejected_reason},
            retcol="reason",
            desc="_get_event_from_row",
        )

    ev = FrozenEvent(
        d,
        internal_metadata_dict=internal_metadata,
        rejected_reason=rejected_reason,
    )

    if check_redacted and redacted:
        ev = prune_event(ev)

        redaction_id = yield self._simple_select_one_onecol(
            table="redactions",
            keyvalues={"redacts": ev.event_id},
            retcol="event_id",
            desc="_get_event_from_row",
        )

        ev.unsigned["redacted_by"] = redaction_id
        # Get the redaction event.

        because = yield self.get_event(
            redaction_id,
            check_redacted=False
        )

        if because:
            ev.unsigned["redacted_because"] = because

    if get_prev_content and "replaces_state" in ev.unsigned:
        prev = yield self.get_event(
            ev.unsigned["replaces_state"],
            get_prev_content=False,
        )
        if prev:
            ev.unsigned["prev_content"] = prev.get_dict()["content"]

    self._get_event_cache.prefill(
        ev.event_id, check_redacted, get_prev_content, ev
    )

    defer.returnValue(ev)
def _get_event_from_row_txn(self, txn, internal_metadata, js, redacted,
                            check_redacted=True, get_prev_content=False,
                            rejected_reason=None):
    """In-transaction event construction, instrumented with per-phase
    timing counters (decode/build/redact/prev-content).

    Note: unlike the sibling variants, `rejected_reason` is passed through
    to FrozenEvent unresolved, and the redaction row id is not looked up —
    `redacted` itself is used as both the redacted_by value and the event
    id fetched for redacted_because.
    """
    start_time = time.time() * 1000

    def update_counter(desc, last_time):
        # Record elapsed ms for phase `desc` and return the new timestamp.
        curr_time = self._get_event_counters.update(desc, last_time)
        sql_getevents_timer.inc_by(curr_time - last_time, desc)
        return curr_time

    d = json.loads(js)
    start_time = update_counter("decode_json", start_time)

    internal_metadata = json.loads(internal_metadata)
    start_time = update_counter("decode_internal", start_time)

    ev = FrozenEvent(
        d,
        internal_metadata_dict=internal_metadata,
        rejected_reason=rejected_reason,
    )
    start_time = update_counter("build_frozen_event", start_time)

    if check_redacted and redacted:
        ev = prune_event(ev)
        ev.unsigned["redacted_by"] = redacted

        # Get the redaction event.
        because = self._get_event_txn(
            txn,
            redacted,
            check_redacted=False
        )

        if because:
            ev.unsigned["redacted_because"] = because
        start_time = update_counter("redact_event", start_time)

    if get_prev_content and "replaces_state" in ev.unsigned:
        prev = self._get_event_txn(
            txn,
            ev.unsigned["replaces_state"],
            get_prev_content=False,
        )
        if prev:
            ev.unsigned["prev_content"] = prev.get_dict()["content"]
        start_time = update_counter("get_prev_content", start_time)

    return ev
def _get_event_txn(self, txn, event_id, check_redacted=True,
                   get_prev_content=True):
    """Fetch a single event by id within a transaction.

    Joins event_json against redactions so one query yields both the event
    body and (if any) the id of the event that redacted it.

    Args:
        txn: database cursor.
        event_id (str): the event to fetch.
        check_redacted (bool): prune and annotate if the event is redacted.
        get_prev_content (bool): attach replaced state content.

    Returns:
        FrozenEvent, or None if the event is unknown.
    """
    sql = (
        "SELECT internal_metadata, json, r.event_id FROM event_json as e "
        "LEFT JOIN redactions as r ON e.event_id = r.redacts "
        "WHERE e.event_id = ? "
        "LIMIT 1 "
    )
    txn.execute(sql, (event_id,))

    res = txn.fetchone()

    if not res:
        return None

    # `redacted` is the redacting event's id (from the LEFT JOIN), or NULL.
    internal_metadata, js, redacted = res

    d = json.loads(js)
    internal_metadata = json.loads(internal_metadata)

    ev = FrozenEvent(d, internal_metadata_dict=internal_metadata)

    if check_redacted and redacted:
        ev = prune_event(ev)
        ev.unsigned["redacted_by"] = redacted

        # Get the redaction event.  Recursive, but with check_redacted=False
        # so a redaction of a redaction doesn't recurse further on this path.
        because = self._get_event_txn(
            txn,
            redacted,
            check_redacted=False
        )

        if because:
            ev.unsigned["redacted_because"] = because

    if get_prev_content and "replaces_state" in ev.unsigned:
        prev = self._get_event_txn(
            txn,
            ev.unsigned["replaces_state"],
            get_prev_content=False,
        )
        if prev:
            ev.unsigned["prev_content"] = prev.get_dict()["content"]

    return ev
def make_pdu(prev_pdus=None, **kwargs):
    """Provide some default fields for making a PduTuple.

    Args:
        prev_pdus (list|None): value for the "prev_events" field; defaults
            to an empty list.  (Was a mutable default argument `[]`, which
            is shared between calls — replaced with a None sentinel.)
        **kwargs: additional event fields, overriding the defaults.

    Returns:
        FrozenEvent
    """
    pdu_fields = {
        "state_key": None,
        # Fresh list per call; never share a module-level default.
        "prev_events": prev_pdus if prev_pdus is not None else [],
    }
    pdu_fields.update(kwargs)

    return FrozenEvent(pdu_fields)
def _get_event_from_row(self, internal_metadata, js, redacted,
                        rejected_reason=None):
    """Build an _EventCacheEntry (original event + optional pre-pruned
    redacted copy) from an event_json row, prime the cache, and return it.

    Unlike the older variants, the cache is keyed on event_id alone and the
    redacted form is computed eagerly, so callers pick original vs redacted
    from the cache entry.

    NOTE(review): the `yield`/`defer.returnValue` pattern implies an
    @defer.inlineCallbacks decorator at the definition site.
    """
    with Measure(self._clock, "_get_event_from_row"):
        d = json.loads(js)
        internal_metadata = json.loads(internal_metadata)

        # Resolve the rejection event_id to its stored reason string.
        if rejected_reason:
            rejected_reason = yield self._simple_select_one_onecol(
                table="rejections",
                keyvalues={"event_id": rejected_reason},
                retcol="reason",
                desc="_get_event_from_row_rejected_reason",
            )

        original_ev = FrozenEvent(
            d,
            internal_metadata_dict=internal_metadata,
            rejected_reason=rejected_reason,
        )

        redacted_event = None
        if redacted:
            redacted_event = prune_event(original_ev)

            redaction_id = yield self._simple_select_one_onecol(
                table="redactions",
                keyvalues={"redacts": redacted_event.event_id},
                retcol="event_id",
                desc="_get_event_from_row_redactions",
            )

            redacted_event.unsigned["redacted_by"] = redaction_id
            # Get the redaction event.

            because = yield self.get_event(
                redaction_id,
                check_redacted=False,
                allow_none=True,
            )

            if because:
                # It's fine to do add the event directly, since get_pdu_json
                # will serialise this field correctly
                redacted_event.unsigned["redacted_because"] = because

        cache_entry = _EventCacheEntry(
            event=original_ev,
            redacted_event=redacted_event,
        )

        self._get_event_cache.prefill((original_ev.event_id, ), cache_entry)

        defer.returnValue(cache_entry)
def _power_levels_event(sender, content):
    """Build a frozen m.room.power_levels state event for the test room."""
    event_dict = {
        "room_id": TEST_ROOM_ID,
        "event_id": _get_event_id(),
        "type": "m.room.power_levels",
        "sender": sender,
        "state_key": "",
        "content": content,
    }
    return FrozenEvent(event_dict)
def _create_event(user_id):
    """Build a frozen m.room.create event naming user_id as room creator."""
    event_dict = {
        "room_id": TEST_ROOM_ID,
        "event_id": _get_event_id(),
        "type": "m.room.create",
        "sender": user_id,
        "content": {"creator": user_id},
    }
    return FrozenEvent(event_dict)
def _get_event_from_row_txn(self, txn, internal_metadata, js, redacted,
                            check_redacted=True, get_prev_content=False):
    """In-transaction event construction with per-phase timing counters
    (compact variant: no rejection handling, no prometheus timer).

    As in the other timed variant, `redacted` is used directly as both the
    redacted_by value and the id of the redaction event to fetch.
    """
    start_time = time.time() * 1000
    # Bind the bound method once; called for every phase below.
    update_counter = self._get_event_counters.update

    d = json.loads(js)
    start_time = update_counter("decode_json", start_time)
    internal_metadata = json.loads(internal_metadata)
    start_time = update_counter("decode_internal", start_time)

    ev = FrozenEvent(d, internal_metadata_dict=internal_metadata)
    start_time = update_counter("build_frozen_event", start_time)

    if check_redacted and redacted:
        ev = prune_event(ev)
        ev.unsigned["redacted_by"] = redacted
        # Get the redaction event.
        because = self._get_event_txn(txn, redacted, check_redacted=False)
        if because:
            ev.unsigned["redacted_because"] = because
        start_time = update_counter("redact_event", start_time)

    if get_prev_content and "replaces_state" in ev.unsigned:
        prev = self._get_event_txn(
            txn,
            ev.unsigned["replaces_state"],
            get_prev_content=False,
        )
        if prev:
            ev.unsigned["prev_content"] = prev.get_dict()["content"]
        start_time = update_counter("get_prev_content", start_time)

    return ev
def _random_state_event(sender):
    """Build an arbitrary test.state state event sent by `sender`."""
    event_dict = {
        "room_id": TEST_ROOM_ID,
        "event_id": _get_event_id(),
        "type": "test.state",
        "sender": sender,
        "state_key": "",
        "content": {"membership": "join"},
    }
    return FrozenEvent(event_dict)
def _join_event(user_id):
    """Build a frozen m.room.member join event for `user_id`."""
    event_dict = {
        "room_id": TEST_ROOM_ID,
        "event_id": _get_event_id(),
        "type": "m.room.member",
        "sender": user_id,
        "state_key": user_id,
        "content": {"membership": "join"},
    }
    return FrozenEvent(event_dict)
def test_msg(self):
    """Receiving a message PDU over federation persists it and notifies.

    Stubs the datastore/auth/state-handler mocks, feeds a single message
    pdu through on_receive_pdu, then asserts each collaborator was called
    exactly once with the expected arguments.

    NOTE(review): uses `yield`, so presumably decorated with
    @defer.inlineCallbacks — confirm at the definition site.
    """
    pdu = FrozenEvent({
        "type": EventTypes.Message,
        "room_id": "foo",
        "content": {"msgtype": u"fooo"},
        "origin_server_ts": 0,
        "event_id": "$a:b",
        "user_id": "@a:b",
        "origin": "b",
        "auth_events": [],
        "hashes": {
            "sha256": "AcLrgtUIqqwaGoHhrEvYG1YLDIsVPYJdSRGhkp3jJp8"
        },
    })

    self.datastore.persist_event.return_value = defer.succeed(None)
    self.datastore.get_room.return_value = defer.succeed(True)
    self.auth.check_host_in_room.return_value = defer.succeed(True)

    # Pretend we have none of the referenced events locally.
    def have_events(event_ids):
        return defer.succeed({})
    self.datastore.have_events.side_effect = have_events

    # Stub state resolution: empty current state and auth events.
    def annotate(ev, old_state=None):
        context = Mock()
        context.current_state = {}
        context.auth_events = {}
        return defer.succeed(context)
    self.state_handler.compute_event_context.side_effect = annotate

    yield self.handlers.federation_handler.on_receive_pdu("fo", pdu, False)

    self.datastore.persist_event.assert_called_once_with(
        ANY,
        is_new_state=True,
        backfilled=False,
        current_state=None,
        context=ANY,
    )

    self.state_handler.compute_event_context.assert_called_once_with(
        ANY, old_state=None,
    )

    self.auth.check.assert_called_once_with(ANY, auth_events={})

    self.notifier.on_new_room_event.assert_called_once_with(
        ANY, extra_users=[]
    )
def _get_event_from_row_txn(self, txn, internal_metadata, js, redacted,
                            check_redacted=True, get_prev_content=False,
                            rejected_reason=None):
    """In-transaction event construction: resolve rejection reason, prune
    redacted events, optionally attach replaced-state content, and prime
    the event cache keyed on (event_id, check_redacted, get_prev_content).
    """
    d = json.loads(js)
    internal_metadata = json.loads(internal_metadata)

    # Resolve the rejection event_id to its stored reason string.
    if rejected_reason:
        rejected_reason = self._simple_select_one_onecol_txn(
            txn,
            table="rejections",
            keyvalues={"event_id": rejected_reason},
            retcol="reason",
        )

    ev = FrozenEvent(
        d,
        internal_metadata_dict=internal_metadata,
        rejected_reason=rejected_reason,
    )

    if check_redacted and redacted:
        ev = prune_event(ev)

        redaction_id = self._simple_select_one_onecol_txn(
            txn,
            table="redactions",
            keyvalues={"redacts": ev.event_id},
            retcol="event_id",
        )

        ev.unsigned["redacted_by"] = redaction_id
        # Get the redaction event.

        because = self._get_event_txn(txn, redaction_id, check_redacted=False)

        if because:
            ev.unsigned["redacted_because"] = because

    if get_prev_content and "replaces_state" in ev.unsigned:
        prev = self._get_event_txn(
            txn,
            ev.unsigned["replaces_state"],
            get_prev_content=False,
        )
        if prev:
            ev.unsigned["prev_content"] = prev.content
            ev.unsigned["prev_sender"] = prev.sender

    self._get_event_cache.prefill(
        (ev.event_id, check_redacted, get_prev_content), ev)

    return ev
def _get_evaluator(self, content):
    """Build a PushRuleEvaluatorForEvent around a history_visibility event
    carrying `content`, with zeroed member count and power levels.
    """
    event = FrozenEvent(
        {
            "event_id": "$event_id",
            "type": "m.room.history_visibility",
            "sender": "@user:test",
            "state_key": "",
            "room_id": "#room:test",
            "content": content,
        },
        RoomVersions.V1,
    )
    # Member count and power levels are irrelevant to these tests.
    member_count = 0
    sender_level = 0
    levels = {}
    return PushRuleEvaluatorForEvent(event, member_count, sender_level, levels)
def test_redactions(self):
    """Redacting a replicated message makes get_event return the pruned form.

    Persists create/join/message events, replicates, then redacts the
    message and checks the replica serves a copy with empty content and
    redaction metadata in `unsigned`.
    """
    self.persist(type="m.room.create", key="", creator=USER_ID)
    self.persist(type="m.room.member", key=USER_ID, membership="join")

    msg = self.persist(type="m.room.message", msgtype="m.text", body="Hello")
    self.replicate()
    self.check("get_event", [msg.event_id], msg)

    redaction = self.persist(type="m.room.redaction", redacts=msg.event_id)
    self.replicate()

    # Build the expected pruned event by hand: content stripped, with
    # redacted_by / redacted_because recorded in unsigned.
    msg_dict = msg.get_dict()
    msg_dict["content"] = {}
    msg_dict["unsigned"]["redacted_by"] = redaction.event_id
    msg_dict["unsigned"]["redacted_because"] = redaction
    redacted = FrozenEvent(msg_dict, msg.internal_metadata.get_dict())

    self.check("get_event", [msg.event_id], redacted)
def create_event(name=None, type=None, state_key=None, depth=2,
                 event_id=None, prev_events=None, **kwargs):
    """Build a FrozenEvent for tests, auto-allocating an event_id.

    Args:
        name: display name for the event; derived from type/state_key/id
            when not supplied.  (Computed but not attached to the event.)
        type: event type (shadows the builtin; kept for caller compat).
        state_key: if not None, makes this a state event.
        depth (int): value for the "depth" field.
        event_id: explicit id; otherwise "$<n>:test" from a global counter.
        prev_events (list|None): prev_events field; defaults to an empty
            list.  (Was a mutable default argument `[]`, shared across
            calls — replaced with a None sentinel.)
        **kwargs: extra fields merged into the event dict (may override).

    Returns:
        FrozenEvent
    """
    global _next_event_id

    # Fresh list per call; never share a module-level default.
    if prev_events is None:
        prev_events = []

    if not event_id:
        _next_event_id += 1
        event_id = "$%s:test" % (_next_event_id, )

    if not name:
        if state_key is not None:
            name = "<%s-%s, %s>" % (type, state_key, event_id, )
        else:
            name = "<%s, %s>" % (type, event_id, )

    d = {
        "event_id": event_id,
        "type": type,
        "sender": "@user_id:example.com",
        "room_id": "!room_id:example.com",
        "depth": depth,
        "prev_events": prev_events,
    }

    # Only state events carry a state_key.
    if state_key is not None:
        d["state_key"] = state_key

    d.update(kwargs)

    event = FrozenEvent(d)

    return event
def notify(result):
    """Fan a replication poll result out to the notifier.

    Rebuilds each event row into a FrozenEvent and pushes it to the room
    notifier (flagging the joining/leaving user for membership events),
    then forwards every non-event stream via notify_from_stream.

    NOTE(review): uses `yield`, so presumably wrapped in
    @defer.inlineCallbacks by the enclosing scope — confirm.
    """
    stream = result.get("events")
    if stream:
        max_position = stream["position"]

        for row in stream["rows"]:
            # Row layout: (position, internal_metadata_json, event_json, ...)
            position = row[0]
            internal = json.loads(row[1])
            event_json = json.loads(row[2])
            event = FrozenEvent(event_json, internal_metadata_dict=internal)

            extra_users = ()
            if event.type == EventTypes.Member:
                extra_users = (event.state_key, )
            notifier.on_new_room_event(
                event, position, max_position, extra_users
            )

    notify_from_stream(result, "push_rules", "push_rules_key", user="******")
    notify_from_stream(result, "user_account_data", "account_data_key", user="******")
    notify_from_stream(result, "room_account_data", "account_data_key", user="******")
    notify_from_stream(result, "tag_account_data", "account_data_key", user="******")
    notify_from_stream(result, "receipts", "receipt_key", room="room_id")
    notify_from_stream(result, "typing", "typing_key", room="room_id")
    notify_from_stream(result, "to_device", "to_device_key", user="******")

    yield notify_device_list_update(result)
def __init__(self, events: Iterable[Tuple[StateKey, dict]]):
    """
    Args:
        events: A state map to event contents.
    """
    # Each key is a (type, state_key) pair; build one frozen event per
    # entry, using the enumeration index as a fake origin_server_ts so
    # ordering is deterministic.
    self._events = {}
    for index, (state_key_pair, content) in enumerate(events):
        self._events[state_key_pair] = FrozenEvent(
            {
                "event_id": "$event_id",
                "type": state_key_pair[0],
                "sender": "@user:test",
                "state_key": state_key_pair[1],
                "room_id": "#room:test",
                "content": content,
                "origin_server_ts": index,
            },
            RoomVersions.V1,
        )
def setUp(self):
    """Create a PushRuleEvaluatorForEvent for a fixed message-bearing
    history_visibility event, with zeroed member count and power levels.
    """
    event = FrozenEvent(
        {
            "event_id": "$event_id",
            "type": "m.room.history_visibility",
            "sender": "@user:test",
            "state_key": "",
            "room_id": "@room:test",
            "content": {
                "body": "foo bar baz"
            },
        },
        RoomVersions.V1,
    )
    # Member count and power levels are irrelevant to these tests.
    room_member_count = 0
    sender_power_level = 0
    power_levels = {}
    self.evaluator = PushRuleEvaluatorForEvent(
        event, room_member_count, sender_power_level, power_levels
    )
def event_from_pdu_json(pdu_json, outlier=False):
    """Construct a FrozenEvent from an event json received over federation

    Args:
        pdu_json (object): pdu as received over federation
        outlier (bool): True to mark this event as an outlier

    Returns:
        FrozenEvent

    Raises:
        SynapseError: if the pdu is missing required fields
    """
    # Validate before constructing, so a malformed pdu surfaces as a
    # SynapseError rather than an error inside FrozenEvent.  (We could
    # probably enforce a bunch of other fields here: room_id, sender,
    # origin, etc etc.)
    assert_params_in_request(pdu_json, ('event_id', 'type'))

    frozen = FrozenEvent(pdu_json)
    frozen.internal_metadata.outlier = outlier
    return frozen
def _handle_request(self, request):
    """Replication endpoint: deserialise an event (plus requester and
    context) from the request body and persist/notify it locally.

    NOTE(review): the `yield`/`defer.returnValue` pattern implies an
    @defer.inlineCallbacks decorator at the definition site.

    Returns (via Deferred):
        (int, dict): HTTP status and empty JSON body.
    """
    with Measure(self.clock, "repl_send_event_parse"):
        content = parse_json_object_from_request(request)

        event_dict = content["event"]
        internal_metadata = content["internal_metadata"]
        rejected_reason = content["rejected_reason"]
        event = FrozenEvent(event_dict, internal_metadata, rejected_reason)

        requester = Requester.deserialize(self.store, content["requester"])
        context = yield EventContext.deserialize(
            self.store, content["context"]
        )

        ratelimit = content["ratelimit"]
        extra_users = [
            UserID.from_string(u) for u in content["extra_users"]
        ]

    # Attribute the request to the originating user for logging/metrics.
    if requester.user:
        request.authenticated_entity = requester.user.to_string()

    logger.info(
        "Got event to send with ID: %s into room: %s",
        event.event_id, event.room_id,
    )

    yield self.event_creation_handler.persist_and_notify_client_event(
        requester, event, context,
        ratelimit=ratelimit,
        extra_users=extra_users,
    )

    defer.returnValue((200, {}))
def make_graph(db_name, room_id, file_prefix, limit):
    """Render a room's event DAG from a synapse sqlite database via pydot.

    Reads events for `room_id` from `db_name`, draws one node per event
    (labelled with type/state_key/content/time/depth/state group), one edge
    per prev_event reference, and clusters events sharing a state group.
    Writes "<file_prefix>.dot" and "<file_prefix>.svg".

    Args:
        db_name (str): path to the sqlite database.
        room_id (str): room whose events to graph.
        file_prefix (str): output filename prefix.
        limit: if truthy, only the most recent `limit` events.
    """
    conn = sqlite3.connect(db_name)

    sql = (
        "SELECT json FROM event_json as j "
        "INNER JOIN events as e ON e.event_id = j.event_id "
        "WHERE j.room_id = ?"
    )

    args = [room_id]

    if limit:
        sql += (
            " ORDER BY topological_ordering DESC, stream_ordering DESC "
            "LIMIT ?"
        )
        args.append(limit)

    c = conn.execute(sql, args)

    events = [FrozenEvent(json.loads(e[0])) for e in c.fetchall()]

    events.sort(key=lambda e: e.depth)

    node_map = {}
    state_groups = {}

    graph = pydot.Dot(graph_name="Test")

    for event in events:
        c = conn.execute(
            "SELECT state_group FROM event_to_state_groups "
            "WHERE event_id = ?",
            (event.event_id, ),
        )

        res = c.fetchone()
        state_group = res[0] if res else None

        if state_group is not None:
            state_groups.setdefault(state_group, []).append(event.event_id)

        t = datetime.datetime.fromtimestamp(
            float(event.origin_server_ts) / 1000
        ).strftime("%Y-%m-%d %H:%M:%S,%f")

        content = json.dumps(unfreeze(event.get_dict()["content"]))

        # pydot HTML-like label; content is escaped to keep it valid.
        label = (
            "<"
            "<b>%(name)s </b><br/>"
            "Type: <b>%(type)s </b><br/>"
            "State key: <b>%(state_key)s </b><br/>"
            "Content: <b>%(content)s </b><br/>"
            "Time: <b>%(time)s </b><br/>"
            "Depth: <b>%(depth)s </b><br/>"
            "State group: %(state_group)s<br/>"
            ">"
        ) % {
            "name": event.event_id,
            "type": event.type,
            "state_key": event.get("state_key", None),
            "content": cgi.escape(content, quote=True),
            "time": t,
            "depth": event.depth,
            "state_group": state_group,
        }

        node = pydot.Node(name=event.event_id, label=label)

        node_map[event.event_id] = node
        graph.add_node(node)

    for event in events:
        for prev_id, _ in event.prev_events:
            # A prev_event we didn't fetch (outside the limit, or missing)
            # gets a stub node so the edge can still be drawn.  The original
            # used a bare `except:`; only KeyError is expected here.
            try:
                end_node = node_map[prev_id]
            except KeyError:
                end_node = pydot.Node(
                    name=prev_id, label="<<b>%s</b>>" % (prev_id, )
                )

                node_map[prev_id] = end_node
                graph.add_node(end_node)

            edge = pydot.Edge(node_map[event.event_id], end_node)
            graph.add_edge(edge)

    # Cluster events sharing a state group (singletons add only noise).
    for group, event_ids in state_groups.items():
        if len(event_ids) <= 1:
            continue

        cluster = pydot.Cluster(
            str(group), label="<State Group: %s>" % (str(group), )
        )

        for event_id in event_ids:
            cluster.add_node(node_map[event_id])

        graph.add_subgraph(cluster)

    graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
    graph.write_svg("%s.svg" % file_prefix, prog="dot")
def test_large_room(self):
    """Profile filter_events_for_server against a very large room.

    Builds 100k resident membership events plus a "joined"
    history_visibility event in a stub store, filters 10 membership
    events (one from the filtering server), writes a cProfile dump, and
    asserts the 5 remote-user events are redacted and the rest untouched.

    NOTE(review): uses `yield`, so presumably decorated with
    @defer.inlineCallbacks — confirm at the definition site.
    """
    # see what happens when we have a large room with hundreds of thousands
    # of membership events
    # As above, the events to be filtered consist of 10 membership events,
    # where one of them is for a user on the server we are filtering for.

    import cProfile
    import pstats
    import time

    # we stub out the store, because building up all that state the normal
    # way is very slow.
    test_store = _TestStore()

    # our initial state is 100000 membership events and one
    # history_visibility event.
    room_state = []

    history_visibility_evt = FrozenEvent({
        "event_id": "$history_vis",
        "type": "m.room.history_visibility",
        "sender": "@resident_user_0:test.com",
        "state_key": "",
        "room_id": TEST_ROOM_ID,
        "content": {"history_visibility": "joined"},
    })
    room_state.append(history_visibility_evt)
    test_store.add_event(history_visibility_evt)

    for i in range(0, 100000):
        user = "******" % (i, )
        evt = FrozenEvent({
            "event_id": "$res_event_%i" % (i, ),
            "type": "m.room.member",
            "state_key": user,
            "sender": user,
            "room_id": TEST_ROOM_ID,
            "content": {"membership": "join", "extra": "zzz,"},
        })
        room_state.append(evt)
        test_store.add_event(evt)

    events_to_filter = []
    for i in range(0, 10):
        # Event 5 belongs to the server we filter for; the rest are remote.
        user = "******" % (i, "test_server" if i == 5 else "other_server")
        evt = FrozenEvent({
            "event_id": "$evt%i" % (i, ),
            "type": "m.room.member",
            "state_key": user,
            "sender": user,
            "room_id": TEST_ROOM_ID,
            "content": {"membership": "join", "extra": "zzz"},
        })
        events_to_filter.append(evt)
        room_state.append(evt)
        test_store.add_event(evt)
        test_store.set_state_ids_for_event(
            evt, {(e.type, e.state_key): e.event_id for e in room_state}
        )

    pr = cProfile.Profile()
    pr.enable()

    logger.info("Starting filtering")
    start = time.time()
    filtered = yield filter_events_for_server(
        test_store, "test_server", events_to_filter
    )
    logger.info("Filtering took %f seconds", time.time() - start)

    pr.disable()
    with open("filter_events_for_server.profile", "w+") as f:
        ps = pstats.Stats(pr, stream=f).sort_stats("cumulative")
        ps.print_stats()

    # the result should be 5 redacted events, and 5 unredacted events.
    for i in range(0, 5):
        self.assertEqual(events_to_filter[i].event_id, filtered[i].event_id)
        self.assertNotIn("extra", filtered[i].content)
    for i in range(5, 10):
        self.assertEqual(events_to_filter[i].event_id, filtered[i].event_id)
        self.assertEqual(filtered[i].content["extra"], "zzz")
def run_test(self, evdict, matchdict):
    """Assert that pruning the event built from `evdict` yields `matchdict`."""
    pruned = prune_event(FrozenEvent(evdict))
    self.assertEquals(pruned.get_dict(), matchdict)
def MockEvent(**kwargs):
    """Build a FrozenEvent from kwargs, with placeholder event_id/type."""
    kwargs.setdefault("event_id", "fake_event_id")
    kwargs.setdefault("type", "fake_type")
    return FrozenEvent(kwargs)
def test_cant_hide_past_history(self):
    """
    If you send a message, you must be able to provide the direct
    prev_events that said event references.
    """
    # Federation stub: when asked for missing events, hand over "three",
    # which itself references a further unknown event ("four").
    def post_json(destination, path, data, headers=None, timeout=0):
        if path.startswith("/_matrix/federation/v1/get_missing_events/"):
            return {
                "events": [{
                    "room_id": self.room_id,
                    "sender": "@baduser:test.serv",
                    "event_id": "three:test.serv",
                    "depth": 1000,
                    "origin_server_ts": 1,
                    "type": "m.room.message",
                    "origin": "test.serv",
                    "content": "hewwo?",
                    "auth_events": [],
                    "prev_events": [("four:test.serv", {})],
                }]
            }
    self.http_client.post_json = post_json

    # Federation stub: answer state_ids requests from our own datastore's
    # state at event "one", exposing only our membership plus auth chain.
    def get_json(destination, path, args, headers=None):
        if path.startswith("/_matrix/federation/v1/state_ids/"):
            d = self.successResultOf(
                self.homeserver.datastore.get_state_ids_for_event(
                    "one:test.serv"
                )
            )

            return succeed({
                "pdu_ids": [
                    y
                    for x, y in d.items()
                    if x == ("m.room.member", "@us:test")
                ],
                "auth_chain_ids": list(d.values()),
            })
    self.http_client.get_json = get_json

    # Figure out what the most recent event is
    most_recent = self.successResultOf(
        maybeDeferred(
            self.homeserver.datastore.get_latest_event_ids_in_room,
            self.room_id,
        )
    )[0]

    # Make a good event
    good_event = FrozenEvent({
        "room_id": self.room_id,
        "sender": "@baduser:test.serv",
        "event_id": "one:test.serv",
        "depth": 1000,
        "origin_server_ts": 1,
        "type": "m.room.message",
        "origin": "test.serv",
        "content": "hewwo?",
        "auth_events": [],
        "prev_events": [(most_recent, {})],
    })

    d = self.handler.on_receive_pdu(
        "test.serv", good_event, sent_to_us_directly=True
    )
    self.reactor.advance(1)
    self.assertEqual(self.successResultOf(d), None)

    # An event referencing a prev_event we don't have ("three"), which the
    # remote can only partially back-fill.
    bad_event = FrozenEvent({
        "room_id": self.room_id,
        "sender": "@baduser:test.serv",
        "event_id": "two:test.serv",
        "depth": 1000,
        "origin_server_ts": 1,
        "type": "m.room.message",
        "origin": "test.serv",
        "content": "hewwo?",
        "auth_events": [],
        "prev_events": [("one:test.serv", {}), ("three:test.serv", {})],
    })

    d = self.handler.on_receive_pdu(
        "test.serv", bad_event, sent_to_us_directly=True
    )
    self.reactor.advance(1)

    # The bad event should still have been accepted as the new extremity...
    extrem = maybeDeferred(
        self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id
    )
    self.assertEqual(self.successResultOf(extrem)[0], "two:test.serv")

    # ...and our membership must still be part of the current state.
    state = self.homeserver.get_state_handler().get_current_state_ids(
        self.room_id
    )
    self.reactor.advance(1)
    self.assertIn(
        ("m.room.member", "@us:test"),
        self.successResultOf(state).keys(),
    )
def setUp(self):
    """Spin up a test homeserver with a public room, disable federation
    auth/signature checks, and join a fake remote user ("@baduser") whose
    join event becomes the latest extremity for the tests to build on.
    """
    self.http_client = Mock()
    self.reactor = ThreadedMemoryReactorClock()
    self.hs_clock = Clock(self.reactor)
    self.homeserver = setup_test_homeserver(
        self.addCleanup,
        http_client=self.http_client,
        clock=self.hs_clock,
        reactor=self.reactor,
    )

    user_id = UserID("us", "test")
    our_user = Requester(user_id, None, False, None, None)
    room_creator = self.homeserver.get_room_creation_handler()
    room = room_creator.create_room(
        our_user, room_creator.PRESETS_DICT["public_chat"], ratelimit=False
    )
    self.reactor.advance(0.1)
    self.room_id = self.successResultOf(room)["room_id"]

    # Figure out what the most recent event is
    most_recent = self.successResultOf(
        maybeDeferred(
            self.homeserver.datastore.get_latest_event_ids_in_room,
            self.room_id,
        )
    )[0]

    join_event = FrozenEvent({
        "room_id": self.room_id,
        "sender": "@baduser:test.serv",
        "state_key": "@baduser:test.serv",
        "event_id": "$join:test.serv",
        "depth": 1000,
        "origin_server_ts": 1,
        "type": "m.room.member",
        "origin": "test.servx",
        "content": {"membership": "join"},
        "auth_events": [],
        "prev_state": [(most_recent, {})],
        "prev_events": [(most_recent, {})],
    })

    self.handler = self.homeserver.get_handlers().federation_handler
    # Short-circuit event auth and federation signature/hash checking so
    # the handcrafted (unsigned) events above are accepted.
    self.handler.do_auth = lambda *a, **b: succeed(True)
    self.client = self.homeserver.get_federation_client()
    self.client._check_sigs_and_hash_and_fetch = (
        lambda dest, pdus, **k: succeed(pdus)
    )

    # Send the join, it should return None (which is not an error)
    d = self.handler.on_receive_pdu(
        "test.serv", join_event, sent_to_us_directly=True
    )
    self.reactor.advance(1)
    self.assertEqual(self.successResultOf(d), None)

    # Make sure we actually joined the room
    self.assertEqual(
        self.successResultOf(
            maybeDeferred(
                self.homeserver.datastore.get_latest_event_ids_in_room,
                self.room_id,
            )
        )[0],
        "$join:test.serv",
    )
try: auth.check(e, auth_events=auth_events) except Exception as ex: print("Failed:", e.event_id, e.type, e.state_key) print("Auth_events:", auth_events) print(ex) print(json.dumps(e.get_dict(), sort_keys=True, indent=4)) # raise print("Success:", e.event_id, e.type, e.state_key) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("json", nargs="?", type=argparse.FileType("r"), default=sys.stdin) args = parser.parse_args() js = json.load(args.json) auth = Auth(Mock()) check_auth( auth, [FrozenEvent(d) for d in js["auth_chain"]], [FrozenEvent(d) for d in js.get("pdus", [])], )
def test_cant_hide_direct_ancestors(self):
    """
    If you send a message, you must be able to provide the direct
    prev_events that said event references.
    """
    def post_json(destination, path, data, headers=None, timeout=0):
        # If it asks us for new missing events, give them NOTHING
        if path.startswith("/_matrix/federation/v1/get_missing_events/"):
            return {"events": []}
    self.http_client.post_json = post_json

    # Figure out what the most recent event is
    most_recent = self.successResultOf(
        maybeDeferred(
            self.homeserver.datastore.get_latest_event_ids_in_room,
            self.room_id,
        )
    )[0]

    # Now lie about an event: it claims a prev_event ("two") that neither
    # we nor the remote can produce.
    lying_event = FrozenEvent({
        "room_id": self.room_id,
        "sender": "@baduser:test.serv",
        "event_id": "one:test.serv",
        "depth": 1000,
        "origin_server_ts": 1,
        "type": "m.room.message",
        "origin": "test.serv",
        "content": {"body": "hewwo?"},
        "auth_events": [],
        "prev_events": [("two:test.serv", {}), (most_recent, {})],
    })

    with LoggingContext(request="lying_event"):
        d = self.handler.on_receive_pdu(
            "test.serv", lying_event, sent_to_us_directly=True
        )

        # Step the reactor, so the database fetches come back
        self.reactor.advance(1)

    # on_receive_pdu should throw an error
    failure = self.failureResultOf(d)
    self.assertEqual(
        failure.value.args[0],
        ("ERROR 403: Your server isn't divulging details about prev_events "
         "referenced in this event."),
    )

    # Make sure the invalid event isn't there
    extrem = maybeDeferred(
        self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id
    )
    self.assertEqual(self.successResultOf(extrem)[0], "$join:test.serv")
def MockEvent(**kwargs):
    # Wrap arbitrary keyword arguments directly in a FrozenEvent; unlike
    # the sibling helper, no default event_id/type are filled in.
    return FrozenEvent(kwargs)
def persist(self, sender=USER_ID, room_id=ROOM_ID, type=None, key=None,
            internal=None, state=None, reset_state=False, backfill=False,
            depth=None, prev_events=None, auth_events=None, prev_state=None,
            redacts=None, push_actions=None, **content):
    """Persist a synthetic event via the master store.

    The previous signature used mutable default arguments (`type={}`,
    `internal={}`, `prev_events=[]`, `auth_events=[]`, `prev_state=[]`,
    `push_actions=[]`); these are now None sentinels normalised below to
    the same values, so behaviour is unchanged.

    Args:
        sender: event sender; room_id: target room.
        type: event type (shadows the builtin; kept for caller compat).
        key: if not None, makes this a state event with that state_key.
        internal (dict|None): internal metadata for the event.
        state (dict|None): explicit state map; skips state resolution.
        reset_state: accepted but unused here — kept for caller compat.
        backfill (bool): persist as a backfilled event.
        depth (int|None): defaults to the running event counter.
        prev_events/auth_events/prev_state: DAG fields; prev_events falls
            back to the room's current extremities when empty.
        redacts: event_id this event redacts, if any.
        push_actions: iterable of (user_id, actions) to stage.
        **content: the event content.

    Returns:
        synapse.events.FrozenEvent: The event that was persisted.
    """
    # Normalise the None sentinels to the old default values.
    if type is None:
        type = {}
    if internal is None:
        internal = {}
    if prev_events is None:
        prev_events = []
    if auth_events is None:
        auth_events = []
    if prev_state is None:
        prev_state = []
    if push_actions is None:
        push_actions = []

    if depth is None:
        depth = self.event_id

    if not prev_events:
        latest_event_ids = yield self.master_store.get_latest_event_ids_in_room(
            room_id
        )
        prev_events = [(ev_id, {}) for ev_id in latest_event_ids]

    event_dict = {
        "sender": sender,
        "type": type,
        "content": content,
        "event_id": "$%d:blue" % (self.event_id, ),
        "room_id": room_id,
        "depth": depth,
        "origin_server_ts": self.event_id,
        "prev_events": prev_events,
        "auth_events": auth_events,
    }
    if key is not None:
        event_dict["state_key"] = key
        event_dict["prev_state"] = prev_state

    if redacts is not None:
        event_dict["redacts"] = redacts

    event = FrozenEvent(event_dict, internal_metadata_dict=internal)

    self.event_id += 1

    if state is not None:
        # Explicit state: build the context by hand, bypassing resolution.
        state_ids = {key: e.event_id for key, e in state.items()}
        context = EventContext()
        context.current_state_ids = state_ids
        context.prev_state_ids = state_ids
    else:
        state_handler = self.hs.get_state_handler()
        context = yield state_handler.compute_event_context(event)

    yield self.master_store.add_push_actions_to_staging(
        event.event_id,
        {user_id: actions for user_id, actions in push_actions},
    )

    ordering = None
    if backfill:
        yield self.master_store.persist_events(
            [(event, context)], backfilled=True
        )
    else:
        ordering, _ = yield self.master_store.persist_event(event, context)

    if ordering:
        event.internal_metadata.stream_ordering = ordering

    defer.returnValue(event)
def event_from_pdu_json(self, pdu_json, outlier=False):
    """Deserialise a federation pdu dict into a FrozenEvent, optionally
    flagging it as an outlier in its internal metadata.
    """
    ev = FrozenEvent(pdu_json)
    ev.internal_metadata.outlier = outlier
    return ev
def make_graph(file_name, room_id, file_prefix, limit):
    """Render an event DAG from a file of JSON event lines via pydot.

    Python 2 code (print statements, `string_types`, str.encode on
    escaped text).  Reads one JSON event per line from `file_name`,
    draws a node per event and an edge per prev_event reference, and
    writes "<file_prefix>.dot" and "<file_prefix>.svg".

    Note: `room_id` is accepted but unused — events are not filtered by
    room here.

    Args:
        file_name (str): path to a file with one JSON event per line.
        room_id (str): unused.
        file_prefix (str): output filename prefix.
        limit: if truthy, only the last `limit` events by depth.
    """
    print "Reading lines"
    with open(file_name) as f:
        lines = f.readlines()
    print "Read lines"

    events = [FrozenEvent(json.loads(line)) for line in lines]

    print "Loaded events."

    events.sort(key=lambda e: e.depth)

    print "Sorted events"

    if limit:
        events = events[-int(limit):]

    node_map = {}

    graph = pydot.Dot(graph_name="Test")

    for event in events:
        t = datetime.datetime.fromtimestamp(
            float(event.origin_server_ts) / 1000
        ).strftime('%Y-%m-%d %H:%M:%S,%f')

        # First rendering of content (pretty-printed JSON) is only
        # printed for debugging; the label below uses the key/value form.
        content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
        content = content.replace("\n", "<br/>\n")

        print content

        content = []
        for key, value in unfreeze(event.get_dict()["content"]).items():
            if value is None:
                value = "<null>"
            elif isinstance(value, string_types):
                pass
            else:
                value = json.dumps(value)

            # Escape both halves for the HTML-like pydot label.
            content.append(
                "<b>%s</b>: %s," % (
                    cgi.escape(key, quote=True).encode(
                        "ascii", 'xmlcharrefreplace'
                    ),
                    cgi.escape(value, quote=True).encode(
                        "ascii", 'xmlcharrefreplace'
                    ),
                )
            )

        content = "<br/>\n".join(content)

        print content

        label = (
            "<"
            "<b>%(name)s </b><br/>"
            "Type: <b>%(type)s </b><br/>"
            "State key: <b>%(state_key)s </b><br/>"
            "Content: <b>%(content)s </b><br/>"
            "Time: <b>%(time)s </b><br/>"
            "Depth: <b>%(depth)s </b><br/>"
            ">"
        ) % {
            "name": event.event_id,
            "type": event.type,
            "state_key": event.get("state_key", None),
            "content": content,
            "time": t,
            "depth": event.depth,
        }

        node = pydot.Node(
            name=event.event_id,
            label=label,
        )

        node_map[event.event_id] = node
        graph.add_node(node)

    print "Created Nodes"

    for event in events:
        for prev_id, _ in event.prev_events:
            # Unknown prev_events (outside the limit) get a stub node so
            # the edge can still be drawn.
            try:
                end_node = node_map[prev_id]
            except:
                end_node = pydot.Node(
                    name=prev_id,
                    label="<<b>%s</b>>" % (prev_id, ),
                )

                node_map[prev_id] = end_node
                graph.add_node(end_node)

            edge = pydot.Edge(node_map[event.event_id], end_node)
            graph.add_edge(edge)

    print "Created edges"

    graph.write('%s.dot' % file_prefix, format='raw', prog='dot')

    print "Created Dot"

    graph.write_svg("%s.svg" % file_prefix, prog='dot')

    print "Created svg"
def _get_event_from_row(self, internal_metadata, js, redacted,
                        check_redacted=True, get_prev_content=False,
                        rejected_reason=None):
    """Build a FrozenEvent from an event_json row, applying redaction and
    rejection metadata, and prime the event cache.

    Near-duplicate of the sibling implementation above; differs only in
    the formatting of the cache prefill call.

    NOTE(review): the `yield`/`defer.returnValue` pattern implies an
    @defer.inlineCallbacks decorator at the definition site.
    """
    d = json.loads(js)
    internal_metadata = json.loads(internal_metadata)

    # Resolve the rejection event_id to its stored reason string.
    if rejected_reason:
        rejected_reason = yield self._simple_select_one_onecol(
            table="rejections",
            keyvalues={"event_id": rejected_reason},
            retcol="reason",
            desc="_get_event_from_row",
        )

    ev = FrozenEvent(
        d,
        internal_metadata_dict=internal_metadata,
        rejected_reason=rejected_reason,
    )

    if check_redacted and redacted:
        ev = prune_event(ev)

        redaction_id = yield self._simple_select_one_onecol(
            table="redactions",
            keyvalues={"redacts": ev.event_id},
            retcol="event_id",
            desc="_get_event_from_row",
        )

        ev.unsigned["redacted_by"] = redaction_id
        # Get the redaction event.

        because = yield self.get_event(
            redaction_id,
            check_redacted=False,
            allow_none=True,
        )

        if because:
            # It's fine to do add the event directly, since get_pdu_json
            # will serialise this field correctly
            ev.unsigned["redacted_because"] = because

    if get_prev_content and "replaces_state" in ev.unsigned:
        prev = yield self.get_event(
            ev.unsigned["replaces_state"],
            get_prev_content=False,
            allow_none=True,
        )
        if prev:
            ev.unsigned["prev_content"] = prev.content
            ev.unsigned["prev_sender"] = prev.sender

    # Cache key includes the flags, since they change the returned object.
    self._get_event_cache.prefill(
        (ev.event_id, check_redacted, get_prev_content), ev)

    defer.returnValue(ev)