async def get_event_reference_hashes(
    self, event_ids: Collection[str]
) -> Dict[str, Dict[str, bytes]]:
    """Get all hashes for given events.

    Args:
        event_ids: The event IDs to get hashes for.

    Returns:
        A mapping of event ID to a mapping of algorithm to hash. Returns an
        empty dict for a given event id if that event is unknown.
    """
    events = await self.get_events(
        event_ids,
        redact_behaviour=EventRedactBehaviour.as_is,
        allow_rejected=True,
    )

    hashes: Dict[str, Dict[str, bytes]] = {}
    for event_id in event_ids:
        event = events.get(event_id)
        if event is None:
            hashes[event_id] = {}
        else:
            ref_alg, ref_hash_bytes = compute_event_reference_hash(event)
            hashes[event_id] = {ref_alg: ref_hash_bytes}

    return hashes
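# A minimal usage sketch (hypothetical names): `store` is assumed to expose
# get_event_reference_hashes() as defined above, and the event IDs are
# illustrative. encode_base64 is assumed to be in scope, as in the other
# snippets here.
async def print_reference_hashes(store) -> None:
    hashes = await store.get_event_reference_hashes(["$event1", "$event2"])
    for event_id, alg_to_hash in hashes.items():
        if not alg_to_hash:
            print("%s: unknown event" % (event_id,))
            continue
        for alg, h in alg_to_hash.items():
            print("%s: %s %s" % (event_id, alg, encode_base64(h)))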
def event_id(self):
    # We have to import this here as otherwise we get an import loop which
    # is hard to break.
    from synapse.crypto.event_signing import compute_event_reference_hash

    if self._event_id:
        return self._event_id
    self._event_id = "$" + encode_base64(compute_event_reference_hash(self)[1])
    return self._event_id
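# Consistency-check sketch (hypothetical helper): in room versions where the
# event ID is derived as above, the ID of a received event can be verified by
# recomputing its reference hash.
def event_id_matches_reference_hash(event) -> bool:
    _, hash_bytes = compute_event_reference_hash(event)
    return event.event_id == "$" + encode_base64(hash_bytes)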
def _store_event_reference_hashes_txn(self, txn, events):
    """Store a hash for a PDU

    Args:
        txn (cursor):
        events (list): list of Events.
    """
    vals = []
    for event in events:
        ref_alg, ref_hash_bytes = compute_event_reference_hash(event)
        vals.append(
            {
                "event_id": event.event_id,
                "algorithm": ref_alg,
                "hash": db_binary_type(ref_hash_bytes),
            }
        )

    self._simple_insert_many_txn(txn, table="event_reference_hashes", values=vals)
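# Hypothetical caller sketch: a `_txn` helper like the one above would
# normally be run inside a database transaction via a runInteraction-style
# wrapper; the wrapper name and description string are assumptions for
# illustration.
def store_event_reference_hashes(self, events):
    return self.runInteraction(
        "store_event_reference_hashes",
        self._store_event_reference_hashes_txn,
        events,
    )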
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input_json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
    )
    args = parser.parse_args()
    logging.basicConfig()

    event_json = dictobj(json.load(args.input_json))

    algorithms = {"sha256": hashlib.sha256}

    for alg_name in event_json.hashes:
        if check_event_content_hash(event_json, algorithms[alg_name]):
            print("PASS content hash %s" % (alg_name,))
        else:
            print("FAIL content hash %s" % (alg_name,))

    for algorithm in algorithms.values():
        name, h_bytes = compute_event_reference_hash(event_json, algorithm)
        print("Reference hash %s: %s" % (name, encode_base64(h_bytes)))
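# Example invocation (the script filename is hypothetical); with no argument,
# the event JSON is read from stdin:
#
#   $ python check_event_hashes.py event.json
#   PASS content hash sha256
#   Reference hash sha256: <base64 digest>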
def _persist_event_txn(
    self, txn, event, context, backfilled, stream_ordering=None,
    is_new_state=True, current_state=None,
):
    # Remove any existing cache entries for the event_id
    txn.call_after(self._invalidate_get_event_cache, event.event_id)

    # We purposefully do this first since if we include a `current_state`
    # key, we *want* to update the `current_state_events` table
    if current_state:
        txn.call_after(self.get_current_state_for_key.invalidate_all)
        txn.call_after(self.get_rooms_for_user.invalidate_all)
        txn.call_after(self.get_users_in_room.invalidate, event.room_id)
        txn.call_after(self.get_joined_hosts_for_room.invalidate, event.room_id)
        txn.call_after(self.get_room_name_and_aliases.invalidate, event.room_id)

        self._simple_delete_txn(
            txn, table="current_state_events", keyvalues={"room_id": event.room_id}
        )

        for s in current_state:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                },
            )

    outlier = event.internal_metadata.is_outlier()

    if not outlier:
        self._update_min_depth_for_room_txn(txn, event.room_id, event.depth)

    have_persisted = self._simple_select_one_txn(
        txn,
        table="events",
        keyvalues={"event_id": event.event_id},
        retcols=["event_id", "outlier"],
        allow_none=True,
    )

    metadata_json = encode_json(
        event.internal_metadata.get_dict(), using_frozen_dicts=USE_FROZEN_DICTS
    ).decode("UTF-8")

    # If we have already persisted this event, we don't need to do any
    # more processing.
    # The processing above must be done on every call to persist event,
    # since they might not have happened on previous calls. For example,
    # if we are persisting an event that we had persisted as an outlier,
    # but is no longer one.
    if have_persisted:
        if not outlier and have_persisted["outlier"]:
            self._store_state_groups_txn(txn, event, context)

            sql = "UPDATE event_json SET internal_metadata = ?" " WHERE event_id = ?"
            txn.execute(sql, (metadata_json, event.event_id))

            sql = "UPDATE events SET outlier = ?" " WHERE event_id = ?"
            txn.execute(sql, (False, event.event_id))
        return

    if not outlier:
        self._store_state_groups_txn(txn, event, context)

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in ["redacted", "redacted_because"]
    }

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json,
            "json": encode_json(
                event_dict, using_frozen_dicts=USE_FROZEN_DICTS
            ).decode("UTF-8"),
        },
    )

    content = encode_json(
        event.content, using_frozen_dicts=USE_FROZEN_DICTS
    ).decode("UTF-8")

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": content,
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys()
        and k
        not in ["redacted", "redacted_because", "signatures", "hashes", "prev_events"]
    }

    vals["unrecognized_keys"] = encode_json(
        unrec, using_frozen_dicts=USE_FROZEN_DICTS
    ).decode("UTF-8")

    sql = (
        "INSERT INTO events"
        " (stream_ordering, topological_ordering, event_id, type,"
        " room_id, content, processed, outlier, depth)"
        " VALUES (?,?,?,?,?,?,?,?,?)"
    )

    txn.execute(
        sql,
        (
            stream_ordering,
            event.depth,
            event.event_id,
            event.type,
            event.room_id,
            content,
            True,
            outlier,
            event.depth,
        ),
    )

    if context.rejected:
        self._store_rejections_txn(txn, event.event_id, context.rejected)

    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(txn, event.event_id, hash_alg, hash_bytes)

    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    self._simple_insert_many_txn(
        txn,
        table="event_auth",
        values=[
            {"event_id": event.event_id, "room_id": event.room_id, "auth_id": auth_id}
            for auth_id, _ in event.auth_events
        ],
    )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(txn, event.event_id, ref_alg, ref_hash_bytes)

    if event.is_state():
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        # TODO: How does this work with backfilling?
        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(txn, "state_events", vals)

        self._simple_insert_many_txn(
            txn,
            table="event_edges",
            values=[
                {
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": True,
                }
                for e_id, h in event.prev_state
            ],
        )

        if is_new_state and not context.rejected:
            txn.call_after(
                self.get_current_state_for_key.invalidate,
                event.room_id,
                event.type,
                event.state_key,
            )

            if event.type == EventTypes.Name or event.type == EventTypes.Aliases:
                txn.call_after(self.get_room_name_and_aliases.invalidate, event.room_id)

            self._simple_upsert_txn(
                txn,
                "current_state_events",
                keyvalues={
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                values={"event_id": event.event_id},
            )

    return
def _persist_event_txn(self, txn, event, context, backfilled,
                       stream_ordering=None, is_new_state=True,
                       current_state=None):
    # Remove any existing cache entries for the event_id
    txn.call_after(self._invalidate_get_event_cache, event.event_id)

    if stream_ordering is None:
        with self._stream_id_gen.get_next_txn(txn) as stream_ordering:
            return self._persist_event_txn(
                txn, event, context, backfilled,
                stream_ordering=stream_ordering,
                is_new_state=is_new_state,
                current_state=current_state,
            )

    # We purposefully do this first since if we include a `current_state`
    # key, we *want* to update the `current_state_events` table
    if current_state:
        self._simple_delete_txn(
            txn,
            table="current_state_events",
            keyvalues={"room_id": event.room_id},
        )

        for s in current_state:
            if s.type == EventTypes.Member:
                txn.call_after(self.get_rooms_for_user.invalidate, s.state_key)
                txn.call_after(self.get_joined_hosts_for_room.invalidate, s.room_id)
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                },
            )

    outlier = event.internal_metadata.is_outlier()

    if not outlier:
        self._store_state_groups_txn(txn, event, context)

        self._update_min_depth_for_room_txn(txn, event.room_id, event.depth)

    have_persisted = self._simple_select_one_onecol_txn(
        txn,
        table="event_json",
        keyvalues={"event_id": event.event_id},
        retcol="event_id",
        allow_none=True,
    )

    metadata_json = encode_canonical_json(
        event.internal_metadata.get_dict()
    ).decode("UTF-8")

    # If we have already persisted this event, we don't need to do any
    # more processing.
    # The processing above must be done on every call to persist event,
    # since they might not have happened on previous calls. For example,
    # if we are persisting an event that we had persisted as an outlier,
    # but is no longer one.
    if have_persisted:
        if not outlier:
            sql = (
                "UPDATE event_json SET internal_metadata = ?"
                " WHERE event_id = ?"
            )
            txn.execute(sql, (metadata_json, event.event_id))

            sql = (
                "UPDATE events SET outlier = ?"
                " WHERE event_id = ?"
            )
            txn.execute(sql, (False, event.event_id))
        return

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in [
            "redacted",
            "redacted_because",
        ]
    }

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json,
            "json": encode_canonical_json(event_dict).decode("UTF-8"),
        },
    )

    content = encode_canonical_json(event.content).decode("UTF-8")

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": content,
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }
    vals["unrecognized_keys"] = encode_canonical_json(unrec).decode("UTF-8")

    sql = (
        "INSERT INTO events"
        " (stream_ordering, topological_ordering, event_id, type,"
        " room_id, content, processed, outlier, depth)"
        " VALUES (?,?,?,?,?,?,?,?,?)"
    )

    txn.execute(
        sql,
        (
            stream_ordering, event.depth, event.event_id, event.type,
            event.room_id, content, True, outlier, event.depth,
        ),
    )

    if context.rejected:
        self._store_rejections_txn(txn, event.event_id, context.rejected)

    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    self._simple_insert_many_txn(
        txn,
        table="event_auth",
        values=[
            {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            }
            for auth_id, _ in event.auth_events
        ],
    )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(
        txn, event.event_id, ref_alg, ref_hash_bytes
    )

    if event.is_state():
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        # TODO: How does this work with backfilling?
        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
        )

        self._simple_insert_many_txn(
            txn,
            table="event_edges",
            values=[
                {
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": True,
                }
                for e_id, h in event.prev_state
            ],
        )

        if is_new_state and not context.rejected:
            self._simple_upsert_txn(
                txn,
                "current_state_events",
                keyvalues={
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                values={
                    "event_id": event.event_id,
                },
            )

    return
def _persist_event_txn(self, txn, event, context, backfilled,
                       stream_ordering=None, is_new_state=True,
                       current_state=None):
    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Feedback:
        self._store_feedback_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    outlier = event.internal_metadata.is_outlier()

    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in [
            "redacted",
            "redacted_because",
        ]
    }

    metadata_json = encode_canonical_json(event.internal_metadata.get_dict())

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json.decode("UTF-8"),
            "json": encode_canonical_json(event_dict).decode("UTF-8"),
        },
        or_replace=True,
    )

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": json.dumps(event.get_dict()["content"]),
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    if stream_ordering is not None:
        vals["stream_ordering"] = stream_ordering

    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }
    vals["unrecognized_keys"] = json.dumps(unrec)

    try:
        self._simple_insert_txn(
            txn,
            "events",
            vals,
            or_replace=(not outlier),
            or_ignore=bool(outlier),
        )
    except Exception:
        logger.warn(
            "Failed to persist, probably duplicate: %s",
            event.event_id,
            exc_info=True,
        )
        raise _RollbackButIsFineException("_persist_event")

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    if not outlier:
        self._store_state_groups_txn(txn, event, context)

    if current_state:
        txn.execute(
            "DELETE FROM current_state_events WHERE room_id = ?",
            (event.room_id,),
        )

        for s in current_state:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                },
                or_replace=True,
            )

    is_state = hasattr(event, "state_key") and event.state_key is not None
    if is_state:
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
            or_replace=True,
        )

        if is_new_state:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

        for e_id, h in event.prev_state:
            self._simple_insert_txn(
                txn,
                table="event_edges",
                values={
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": 1,
                },
                or_ignore=True,
            )

        if not backfilled:
            self._simple_insert_txn(
                txn,
                table="state_forward_extremities",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

            for prev_state_id, _ in event.prev_state:
                self._simple_delete_txn(
                    txn,
                    table="state_forward_extremities",
                    keyvalues={
                        "event_id": prev_state_id,
                    },
                )

    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    for auth_id, _ in event.auth_events:
        self._simple_insert_txn(
            txn,
            table="event_auth",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            },
            or_ignore=True,
        )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(
        txn, event.event_id, ref_alg, ref_hash_bytes
    )

    if not outlier:
        self._update_min_depth_for_room_txn(txn, event.room_id, event.depth)
async def _locally_reject_invite(
    self,
    invite_event: EventBase,
    txn_id: Optional[str],
    requester: Requester,
    content: JsonDict,
) -> Tuple[str, int]:
    """Generate a local invite rejection

    This is called after we fail to reject an invite via a remote server. It
    generates an out-of-band membership event locally.

    Args:
        invite_event: the invite to be rejected
        txn_id: optional transaction ID supplied by the client
        requester: user making the rejection request, according to the access
            token
        content: additional content to include in the rejection event.
            Normally an empty dict.
    """
    room_id = invite_event.room_id
    target_user = invite_event.state_key
    room_version = await self.store.get_room_version(room_id)

    content["membership"] = Membership.LEAVE

    # the auth events for the new event are the same as that of the invite,
    # plus the invite itself.
    #
    # the prev_events are just the invite.
    invite_hash = invite_event.event_id  # type: Union[str, Tuple]
    if room_version.event_format == EventFormatVersions.V1:
        alg, h = compute_event_reference_hash(invite_event)
        invite_hash = (invite_event.event_id, {alg: encode_base64(h)})

    auth_events = tuple(invite_event.auth_events) + (invite_hash,)
    prev_events = (invite_hash,)

    # we cap depth of generated events, to ensure that they are not
    # rejected by other servers (and so that they can be persisted in
    # the db)
    depth = min(invite_event.depth + 1, MAX_DEPTH)

    event_dict = {
        "depth": depth,
        "auth_events": auth_events,
        "prev_events": prev_events,
        "type": EventTypes.Member,
        "room_id": room_id,
        "sender": target_user,
        "content": content,
        "state_key": target_user,
    }

    event = create_local_event_from_event_dict(
        clock=self.clock,
        hostname=self.hs.hostname,
        signing_key=self.hs.signing_key,
        room_version=room_version,
        event_dict=event_dict,
    )
    event.internal_metadata.outlier = True
    event.internal_metadata.out_of_band_membership = True
    if txn_id is not None:
        event.internal_metadata.txn_id = txn_id
    if requester.access_token_id is not None:
        event.internal_metadata.token_id = requester.access_token_id

    EventValidator().validate_new(event, self.config)

    context = await self.state_handler.compute_event_context(event)
    context.app_service = requester.app_service
    stream_id = await self.event_creation_handler.handle_new_client_event(
        requester,
        event,
        context,
        extra_users=[UserID.from_string(target_user)],
    )
    return event.event_id, stream_id
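# Shape sketch for the room-v1 branch above: in the v1 event format,
# prev_events and auth_events entries are (event_id, {algorithm: hash})
# pairs rather than bare event IDs. Values here are illustrative only.
#
#   invite_hash = ("$invite_event_id", {"sha256": "<base64 reference hash>"})
#   prev_events = (invite_hash,)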
def _persist_event_txn(self, txn, event, context, backfilled,
                       stream_ordering=None, is_new_state=True,
                       current_state=None):
    # Remove any existing cache entries for the event_id
    self._get_event_cache.pop(event.event_id)

    # We purposefully do this first since if we include a `current_state`
    # key, we *want* to update the `current_state_events` table
    if current_state:
        txn.execute(
            "DELETE FROM current_state_events WHERE room_id = ?",
            (event.room_id,),
        )

        for s in current_state:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                },
                or_replace=True,
            )

    if event.is_state() and is_new_state:
        if not backfilled and not context.rejected:
            self._simple_insert_txn(
                txn,
                table="state_forward_extremities",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

            for prev_state_id, _ in event.prev_state:
                self._simple_delete_txn(
                    txn,
                    table="state_forward_extremities",
                    keyvalues={
                        "event_id": prev_state_id,
                    },
                )

    outlier = event.internal_metadata.is_outlier()

    if not outlier:
        self._store_state_groups_txn(txn, event, context)

        self._update_min_depth_for_room_txn(txn, event.room_id, event.depth)

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    have_persisted = self._simple_select_one_onecol_txn(
        txn,
        table="event_json",
        keyvalues={"event_id": event.event_id},
        retcol="event_id",
        allow_none=True,
    )

    metadata_json = encode_canonical_json(event.internal_metadata.get_dict())

    # If we have already persisted this event, we don't need to do any
    # more processing.
    # The processing above must be done on every call to persist event,
    # since they might not have happened on previous calls. For example,
    # if we are persisting an event that we had persisted as an outlier,
    # but is no longer one.
    if have_persisted:
        if not outlier:
            sql = (
                "UPDATE event_json SET internal_metadata = ?"
                " WHERE event_id = ?"
            )
            txn.execute(sql, (metadata_json.decode("UTF-8"), event.event_id))

            sql = (
                "UPDATE events SET outlier = 0"
                " WHERE event_id = ?"
            )
            txn.execute(sql, (event.event_id,))
        return

    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Feedback:
        self._store_feedback_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in [
            "redacted",
            "redacted_because",
        ]
    }

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json.decode("UTF-8"),
            "json": encode_canonical_json(event_dict).decode("UTF-8"),
        },
        or_replace=True,
    )

    content = encode_canonical_json(event.content).decode("UTF-8")

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": content,
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    if stream_ordering is not None:
        vals["stream_ordering"] = stream_ordering

    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }
    vals["unrecognized_keys"] = encode_canonical_json(unrec).decode("UTF-8")

    try:
        self._simple_insert_txn(
            txn,
            "events",
            vals,
            or_replace=(not outlier),
            or_ignore=bool(outlier),
        )
    except Exception:
        logger.warn(
            "Failed to persist, probably duplicate: %s",
            event.event_id,
            exc_info=True,
        )
        raise _RollbackButIsFineException("_persist_event")

    if context.rejected:
        self._store_rejections_txn(txn, event.event_id, context.rejected)

    if event.is_state():
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        # TODO: How does this work with backfilling?
        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
            or_replace=True,
        )

        if is_new_state and not context.rejected:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

        for e_id, h in event.prev_state:
            self._simple_insert_txn(
                txn,
                table="event_edges",
                values={
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": 1,
                },
                or_ignore=True,
            )

    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    for auth_id, _ in event.auth_events:
        self._simple_insert_txn(
            txn,
            table="event_auth",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            },
            or_ignore=True,
        )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(
        txn, event.event_id, ref_alg, ref_hash_bytes
    )
def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None,
                       is_new_state=True, current_state=None):
    if event.type == RoomMemberEvent.TYPE:
        self._store_room_member_txn(txn, event)
    elif event.type == FeedbackEvent.TYPE:
        self._store_feedback_txn(txn, event)
    elif event.type == RoomNameEvent.TYPE:
        self._store_room_name_txn(txn, event)
    elif event.type == RoomTopicEvent.TYPE:
        self._store_room_topic_txn(txn, event)
    elif event.type == RoomRedactionEvent.TYPE:
        self._store_redaction(txn, event)

    outlier = False
    if hasattr(event, "outlier"):
        outlier = event.outlier

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": json.dumps(event.content),
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    if stream_ordering is not None:
        vals["stream_ordering"] = stream_ordering

    unrec = {
        k: v
        for k, v in event.get_full_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }
    vals["unrecognized_keys"] = json.dumps(unrec)

    try:
        self._simple_insert_txn(
            txn,
            "events",
            vals,
            or_replace=(not outlier),
            or_ignore=bool(outlier),
        )
    except Exception:
        logger.warn(
            "Failed to persist, probably duplicate: %s",
            event.event_id,
            exc_info=True,
        )
        raise _RollbackButIsFineException("_persist_event")

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    self._store_state_groups_txn(txn, event)

    if current_state:
        txn.execute(
            "DELETE FROM current_state_events WHERE room_id = ?",
            (event.room_id,),
        )

        for s in current_state:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                },
                or_replace=True,
            )

    is_state = hasattr(event, "state_key") and event.state_key is not None
    if is_state:
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
            or_replace=True,
        )

        if is_new_state:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

        for e_id, h in event.prev_state:
            self._simple_insert_txn(
                txn,
                table="event_edges",
                values={
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": 1,
                },
                or_ignore=True,
            )

        if not backfilled:
            self._simple_insert_txn(
                txn,
                table="state_forward_extremities",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

            for prev_state_id, _ in event.prev_state:
                self._simple_delete_txn(
                    txn,
                    table="state_forward_extremities",
                    keyvalues={
                        "event_id": prev_state_id,
                    },
                )

    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    if hasattr(event, "signatures"):
        logger.debug("sigs: %s", event.signatures)
        for name, sigs in event.signatures.items():
            for key_id, signature_base64 in sigs.items():
                signature_bytes = decode_base64(signature_base64)
                self._store_event_signature_txn(
                    txn, event.event_id, name, key_id, signature_bytes,
                )

    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    for auth_id, _ in event.auth_events:
        self._simple_insert_txn(
            txn,
            table="event_auth",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            },
            or_ignore=True,
        )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(
        txn, event.event_id, ref_alg, ref_hash_bytes
    )

    if not outlier:
        self._update_min_depth_for_room_txn(
            txn, event.room_id, event.depth
        )