Example #1
def respond_with_json(request, code, json_object, send_cors=False,
                      response_code_message=None, pretty_print=False,
                      version_string="", canonical_json=True):
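    # Pick an encoder: pretty-printed output for readability, canonical JSON
    # when requested, otherwise the default encoder (which respects
    # synapse.events.USE_FROZEN_DICTS).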
    if pretty_print:
        json_bytes = encode_pretty_printed_json(json_object) + "\n"
    else:
        if canonical_json:
            json_bytes = encode_canonical_json(json_object)
        else:
            json_bytes = encode_json(
                json_object, using_frozen_dicts=synapse.events.USE_FROZEN_DICTS
            )

    return respond_with_json_bytes(
        request, code, json_bytes,
        send_cors=send_cors,
        response_code_message=response_code_message,
        version_string=version_string
    )
Example #2
def respond_with_json(request,
                      code,
                      json_object,
                      send_cors=False,
                      response_code_message=None,
                      pretty_print=False,
                      version_string="",
                      canonical_json=True):
    if pretty_print:
        json_bytes = encode_pretty_printed_json(json_object) + "\n"
    else:
        if canonical_json:
            json_bytes = encode_canonical_json(json_object)
        else:
            json_bytes = encode_json(
                json_object,
                using_frozen_dicts=synapse.events.USE_FROZEN_DICTS)

    return respond_with_json_bytes(request,
                                   code,
                                   json_bytes,
                                   send_cors=send_cors,
                                   response_code_message=response_code_message,
                                   version_string=version_string)
Example #3
    def _persist_event_txn(
        self, txn, event, context, backfilled, stream_ordering=None, is_new_state=True, current_state=None
    ):

        # Remove any existing cache entries for the event_id
        txn.call_after(self._invalidate_get_event_cache, event.event_id)

        # We purposefully do this first since if we include a `current_state`
        # key, we *want* to update the `current_state_events` table
        if current_state:
            txn.call_after(self.get_current_state_for_key.invalidate_all)
            txn.call_after(self.get_rooms_for_user.invalidate_all)
            txn.call_after(self.get_users_in_room.invalidate, event.room_id)
            txn.call_after(self.get_joined_hosts_for_room.invalidate, event.room_id)
            txn.call_after(self.get_room_name_and_aliases.invalidate, event.room_id)

            self._simple_delete_txn(txn, table="current_state_events", keyvalues={"room_id": event.room_id})

            for s in current_state:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {"event_id": s.event_id, "room_id": s.room_id, "type": s.type, "state_key": s.state_key},
                )

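        # Outlier events (e.g. ones fetched out of band during backfill) are
        # skipped when updating the room's depth tracking below.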
        outlier = event.internal_metadata.is_outlier()

        if not outlier:
            self._update_min_depth_for_room_txn(txn, event.room_id, event.depth)

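        # Check whether this event is already in the events table and, if so,
        # whether it was stored as an outlier.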
        have_persisted = self._simple_select_one_txn(
            txn,
            table="events",
            keyvalues={"event_id": event.event_id},
            retcols=["event_id", "outlier"],
            allow_none=True,
        )

        metadata_json = encode_json(event.internal_metadata.get_dict(), using_frozen_dicts=USE_FROZEN_DICTS).decode(
            "UTF-8"
        )

        # If we have already persisted this event, we don't need to do any
        # more processing.
        # The processing above must be done on every call to persist event,
        # since they might not have happened on previous calls. For example,
        # if we are persisting an event that we had persisted as an outlier,
        # but is no longer one.
        if have_persisted:
            if not outlier and have_persisted["outlier"]:
                self._store_state_groups_txn(txn, event, context)

                sql = "UPDATE event_json SET internal_metadata = ? WHERE event_id = ?"
                txn.execute(sql, (metadata_json, event.event_id))

                sql = "UPDATE events SET outlier = ? WHERE event_id = ?"
                txn.execute(sql, (False, event.event_id))
            return

        if not outlier:
            self._store_state_groups_txn(txn, event, context)

        self._handle_prev_events(
            txn, outlier=outlier, event_id=event.event_id, prev_events=event.prev_events, room_id=event.room_id
        )

        if event.type == EventTypes.Member:
            self._store_room_member_txn(txn, event)
        elif event.type == EventTypes.Name:
            self._store_room_name_txn(txn, event)
        elif event.type == EventTypes.Topic:
            self._store_room_topic_txn(txn, event)
        elif event.type == EventTypes.Redaction:
            self._store_redaction(txn, event)

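        # Strip the redaction bookkeeping keys before storing the event JSON.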
        event_dict = {k: v for k, v in event.get_dict().items() if k not in ["redacted", "redacted_because"]}

        self._simple_insert_txn(
            txn,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json,
                "json": encode_json(event_dict, using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8"),
            },
        )

        content = encode_json(event.content, using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8")

        vals = {
            "topological_ordering": event.depth,
            "event_id": event.event_id,
            "type": event.type,
            "room_id": event.room_id,
            "content": content,
            "processed": True,
            "outlier": outlier,
            "depth": event.depth,
        }

        unrec = {
            k: v
            for k, v in event.get_dict().items()
            if k not in vals.keys() and k not in ["redacted", "redacted_because", "signatures", "hashes", "prev_events"]
        }

        vals["unrecognized_keys"] = encode_json(unrec, using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8")

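        # stream_ordering comes from the caller; topological_ordering and
        # depth are both populated from event.depth.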
        sql = (
            "INSERT INTO events"
            " (stream_ordering, topological_ordering, event_id, type,"
            " room_id, content, processed, outlier, depth)"
            " VALUES (?,?,?,?,?,?,?,?,?)"
        )

        txn.execute(
            sql,
            (
                stream_ordering,
                event.depth,
                event.event_id,
                event.type,
                event.room_id,
                content,
                True,
                outlier,
                event.depth,
            ),
        )

        if context.rejected:
            self._store_rejections_txn(txn, event.event_id, context.rejected)

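        # Persist the event's own content hashes and the hashes it carries for
        # its prev_events.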
        for hash_alg, hash_base64 in event.hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_event_content_hash_txn(txn, event.event_id, hash_alg, hash_bytes)

        for prev_event_id, prev_hashes in event.prev_events:
            for alg, hash_base64 in prev_hashes.items():
                hash_bytes = decode_base64(hash_base64)
                self._store_prev_event_hash_txn(txn, event.event_id, prev_event_id, alg, hash_bytes)

        self._simple_insert_many_txn(
            txn,
            table="event_auth",
            values=[
                {"event_id": event.event_id, "room_id": event.room_id, "auth_id": auth_id}
                for auth_id, _ in event.auth_events
            ],
        )

        (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
        self._store_event_reference_hash_txn(txn, event.event_id, ref_alg, ref_hash_bytes)

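        # State events are additionally recorded in state_events and
        # event_edges, and (if new and not rejected) in current_state_events.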
        if event.is_state():
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            # TODO: How does this work with backfilling?
            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            self._simple_insert_txn(txn, "state_events", vals)

            self._simple_insert_many_txn(
                txn,
                table="event_edges",
                values=[
                    {"event_id": event.event_id, "prev_event_id": e_id, "room_id": event.room_id, "is_state": True}
                    for e_id, h in event.prev_state
                ],
            )

            if is_new_state and not context.rejected:
                txn.call_after(self.get_current_state_for_key.invalidate, event.room_id, event.type, event.state_key)

                if event.type == EventTypes.Name or event.type == EventTypes.Aliases:
                    txn.call_after(self.get_room_name_and_aliases.invalidate, event.room_id)

                self._simple_upsert_txn(
                    txn,
                    "current_state_events",
                    keyvalues={"room_id": event.room_id, "type": event.type, "state_key": event.state_key},
                    values={"event_id": event.event_id},
                )

        return
Example #4
    def _persist_events_txn(self, txn, events_and_contexts, backfilled,
                            is_new_state=True):

        # Remove any existing cache entries for the event_ids
        for event, _ in events_and_contexts:
            txn.call_after(self._invalidate_get_event_cache, event.event_id)

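        # Work out the largest depth seen for each room (ignoring outliers)
        # and feed it into the per-room depth update below.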
        depth_updates = {}
        for event, _ in events_and_contexts:
            if event.internal_metadata.is_outlier():
                continue
            depth_updates[event.room_id] = max(
                event.depth, depth_updates.get(event.room_id, event.depth)
            )

        for room_id, depth in depth_updates.items():
            self._update_min_depth_for_room_txn(txn, room_id, depth)

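        # Check which of these events have already been persisted, and whether
        # each was stored as an outlier.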
        txn.execute(
            "SELECT event_id, outlier FROM events WHERE event_id in (%s)" % (
                ",".join(["?"] * len(events_and_contexts)),
            ),
            [event.event_id for event, _ in events_and_contexts]
        )
        have_persisted = {
            event_id: outlier
            for event_id, outlier in txn.fetchall()
        }

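        # De-duplicate the batch and skip events that are already persisted.
        # An event previously stored only as an outlier that now arrives in
        # full has its existing rows updated in place instead.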
        event_map = {}
        to_remove = set()
        for event, context in events_and_contexts:
            # Handle the case of the list including the same event multiple
            # times. The tricky thing here is when they differ by whether
            # they are an outlier.
            if event.event_id in event_map:
                other = event_map[event.event_id]

                if not other.internal_metadata.is_outlier():
                    to_remove.add(event)
                    continue
                elif not event.internal_metadata.is_outlier():
                    to_remove.add(event)
                    continue
                else:
                    to_remove.add(other)

            event_map[event.event_id] = event

            if event.event_id not in have_persisted:
                continue

            to_remove.add(event)

            outlier_persisted = have_persisted[event.event_id]
            if not event.internal_metadata.is_outlier() and outlier_persisted:
                self._store_state_groups_txn(
                    txn, event, context,
                )

                metadata_json = encode_json(
                    event.internal_metadata.get_dict(),
                    using_frozen_dicts=USE_FROZEN_DICTS
                ).decode("UTF-8")

                sql = (
                    "UPDATE event_json SET internal_metadata = ?"
                    " WHERE event_id = ?"
                )
                txn.execute(
                    sql,
                    (metadata_json, event.event_id,)
                )

                sql = (
                    "UPDATE events SET outlier = ?"
                    " WHERE event_id = ?"
                )
                txn.execute(
                    sql,
                    (False, event.event_id,)
                )

                self._update_extremeties(txn, [event])

        # Materialise the filtered batch as a list so it can be re-iterated
        # and checked for emptiness below.
        events_and_contexts = [
            ec for ec in events_and_contexts
            if ec[0] not in to_remove
        ]

        if not events_and_contexts:
            return

        self._store_mult_state_groups_txn(txn, [
            (event, context)
            for event, context in events_and_contexts
            if not event.internal_metadata.is_outlier()
        ])

        self._handle_mult_prev_events(
            txn,
            events=[event for event, _ in events_and_contexts],
        )

        for event, _ in events_and_contexts:
            if event.type == EventTypes.Name:
                self._store_room_name_txn(txn, event)
            elif event.type == EventTypes.Topic:
                self._store_room_topic_txn(txn, event)
            elif event.type == EventTypes.Redaction:
                self._store_redaction(txn, event)

        self._store_room_members_txn(
            txn,
            [
                event
                for event, _ in events_and_contexts
                if event.type == EventTypes.Member
            ]
        )

        def event_dict(event):
            return {
                k: v
                for k, v in event.get_dict().items()
                if k not in [
                    "redacted",
                    "redacted_because",
                ]
            }

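        # Bulk-insert the full event JSON (minus the redaction bookkeeping
        # keys) together with each event's internal metadata.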
        self._simple_insert_many_txn(
            txn,
            table="event_json",
            values=[
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "internal_metadata": encode_json(
                        event.internal_metadata.get_dict(),
                        using_frozen_dicts=USE_FROZEN_DICTS
                    ).decode("UTF-8"),
                    "json": encode_json(
                        event_dict(event), using_frozen_dicts=USE_FROZEN_DICTS
                    ).decode("UTF-8"),
                }
                for event, _ in events_and_contexts
            ],
        )

        self._simple_insert_many_txn(
            txn,
            table="events",
            values=[
                {
                    "stream_ordering": event.internal_metadata.stream_ordering,
                    "topological_ordering": event.depth,
                    "depth": event.depth,
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "processed": True,
                    "outlier": event.internal_metadata.is_outlier(),
                    "content": encode_json(
                        event.content, using_frozen_dicts=USE_FROZEN_DICTS
                    ).decode("UTF-8"),
                }
                for event, _ in events_and_contexts
            ],
        )

        # Store rejection reasons for any rejected events in the batch.
        for event, context in events_and_contexts:
            if context.rejected:
                self._store_rejections_txn(
                    txn, event.event_id, context.rejected
                )

        self._simple_insert_many_txn(
            txn,
            table="event_auth",
            values=[
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "auth_id": auth_id,
                }
                for event, _ in events_and_contexts
                for auth_id, _ in event.auth_events
            ],
        )

        self._store_event_reference_hashes_txn(
            txn, [event for event, _ in events_and_contexts]
        )

        state_events_and_contexts = [
            ec for ec in events_and_contexts
            if ec[0].is_state()
        ]

        state_values = []
        for event, context in state_events_and_contexts:
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            # TODO: How does this work with backfilling?
            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            state_values.append(vals)

        self._simple_insert_many_txn(
            txn,
            table="state_events",
            values=state_values,
        )

        self._simple_insert_many_txn(
            txn,
            table="event_edges",
            values=[
                {
                    "event_id": event.event_id,
                    "prev_event_id": prev_id,
                    "room_id": event.room_id,
                    "is_state": True,
                }
                for event, _ in state_events_and_contexts
                for prev_id, _ in event.prev_state
            ],
        )

        if is_new_state:
            for event, context in state_events_and_contexts:
                if not context.rejected:
                    txn.call_after(
                        self.get_current_state_for_key.invalidate,
                        (event.room_id, event.type, event.state_key,)
                    )

                    if event.type in [EventTypes.Name, EventTypes.Aliases]:
                        txn.call_after(
                            self.get_room_name_and_aliases.invalidate,
                            (event.room_id,)
                        )

                    self._simple_upsert_txn(
                        txn,
                        "current_state_events",
                        keyvalues={
                            "room_id": event.room_id,
                            "type": event.type,
                            "state_key": event.state_key,
                        },
                        values={
                            "event_id": event.event_id,
                        }
                    )

        return
Example #5
    def _persist_event_txn(self,
                           txn,
                           event,
                           context,
                           backfilled,
                           stream_ordering=None,
                           is_new_state=True,
                           current_state=None):

        # Remove any existing cache entries for the event_id
        txn.call_after(self._invalidate_get_event_cache, event.event_id)

        # We purposefully do this first since if we include a `current_state`
        # key, we *want* to update the `current_state_events` table
        if current_state:
            txn.call_after(self.get_current_state_for_key.invalidate_all)
            txn.call_after(self.get_rooms_for_user.invalidate_all)
            txn.call_after(self.get_users_in_room.invalidate, event.room_id)
            txn.call_after(self.get_joined_hosts_for_room.invalidate,
                           event.room_id)
            txn.call_after(self.get_room_name_and_aliases.invalidate,
                           event.room_id)

            self._simple_delete_txn(
                txn,
                table="current_state_events",
                keyvalues={"room_id": event.room_id},
            )

            for s in current_state:
                self._simple_insert_txn(
                    txn, "current_state_events", {
                        "event_id": s.event_id,
                        "room_id": s.room_id,
                        "type": s.type,
                        "state_key": s.state_key,
                    })

        outlier = event.internal_metadata.is_outlier()

        if not outlier:
            self._update_min_depth_for_room_txn(txn, event.room_id,
                                                event.depth)

        have_persisted = self._simple_select_one_txn(
            txn,
            table="events",
            keyvalues={"event_id": event.event_id},
            retcols=["event_id", "outlier"],
            allow_none=True,
        )

        metadata_json = encode_json(
            event.internal_metadata.get_dict(),
            using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8")

        # If we have already persisted this event, we don't need to do any
        # more processing.
        # The processing above must be done on every call to persist event,
        # since they might not have happened on previous calls. For example,
        # if we are persisting an event that we had persisted as an outlier,
        # but is no longer one.
        if have_persisted:
            if not outlier and have_persisted["outlier"]:
                self._store_state_groups_txn(txn, event, context)

                sql = ("UPDATE event_json SET internal_metadata = ?"
                       " WHERE event_id = ?")
                txn.execute(sql, (
                    metadata_json,
                    event.event_id,
                ))

                sql = "UPDATE events SET outlier = ? WHERE event_id = ?"
                txn.execute(sql, (
                    False,
                    event.event_id,
                ))
            return

        if not outlier:
            self._store_state_groups_txn(txn, event, context)

        self._handle_prev_events(
            txn,
            outlier=outlier,
            event_id=event.event_id,
            prev_events=event.prev_events,
            room_id=event.room_id,
        )

        if event.type == EventTypes.Member:
            self._store_room_member_txn(txn, event)
        elif event.type == EventTypes.Name:
            self._store_room_name_txn(txn, event)
        elif event.type == EventTypes.Topic:
            self._store_room_topic_txn(txn, event)
        elif event.type == EventTypes.Redaction:
            self._store_redaction(txn, event)

        event_dict = {
            k: v
            for k, v in event.get_dict().items() if k not in [
                "redacted",
                "redacted_because",
            ]
        }

        self._simple_insert_txn(
            txn,
            table="event_json",
            values={
                "event_id":
                event.event_id,
                "room_id":
                event.room_id,
                "internal_metadata":
                metadata_json,
                "json":
                encode_json(
                    event_dict,
                    using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8"),
            },
        )

        content = encode_json(
            event.content, using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8")

        vals = {
            "topological_ordering": event.depth,
            "event_id": event.event_id,
            "type": event.type,
            "room_id": event.room_id,
            "content": content,
            "processed": True,
            "outlier": outlier,
            "depth": event.depth,
        }

        unrec = {
            k: v
            for k, v in event.get_dict().items()
            if k not in vals.keys() and k not in [
                "redacted",
                "redacted_because",
                "signatures",
                "hashes",
                "prev_events",
            ]
        }

        vals["unrecognized_keys"] = encode_json(
            unrec, using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8")

        sql = ("INSERT INTO events"
               " (stream_ordering, topological_ordering, event_id, type,"
               " room_id, content, processed, outlier, depth)"
               " VALUES (?,?,?,?,?,?,?,?,?)")

        txn.execute(sql,
                    (stream_ordering, event.depth, event.event_id, event.type,
                     event.room_id, content, True, outlier, event.depth))

        if context.rejected:
            self._store_rejections_txn(txn, event.event_id, context.rejected)

        for hash_alg, hash_base64 in event.hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_event_content_hash_txn(
                txn,
                event.event_id,
                hash_alg,
                hash_bytes,
            )

        for prev_event_id, prev_hashes in event.prev_events:
            for alg, hash_base64 in prev_hashes.items():
                hash_bytes = decode_base64(hash_base64)
                self._store_prev_event_hash_txn(txn, event.event_id,
                                                prev_event_id, alg, hash_bytes)

        self._simple_insert_many_txn(
            txn,
            table="event_auth",
            values=[{
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            } for auth_id, _ in event.auth_events],
        )

        (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
        self._store_event_reference_hash_txn(txn, event.event_id, ref_alg,
                                             ref_hash_bytes)

        if event.is_state():
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            # TODO: How does this work with backfilling?
            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            self._simple_insert_txn(
                txn,
                "state_events",
                vals,
            )

            self._simple_insert_many_txn(
                txn,
                table="event_edges",
                values=[{
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": True,
                } for e_id, h in event.prev_state],
            )

            if is_new_state and not context.rejected:
                txn.call_after(self.get_current_state_for_key.invalidate,
                               event.room_id, event.type, event.state_key)

                if (event.type == EventTypes.Name
                        or event.type == EventTypes.Aliases):
                    txn.call_after(self.get_room_name_and_aliases.invalidate,
                                   event.room_id)

                self._simple_upsert_txn(txn,
                                        "current_state_events",
                                        keyvalues={
                                            "room_id": event.room_id,
                                            "type": event.type,
                                            "state_key": event.state_key,
                                        },
                                        values={
                                            "event_id": event.event_id,
                                        })

        return
Example #6
    def _persist_events_txn(self,
                            txn,
                            events_and_contexts,
                            backfilled,
                            is_new_state=True):

        # Remove any existing cache entries for the event_ids
        for event, _ in events_and_contexts:
            txn.call_after(self._invalidate_get_event_cache, event.event_id)

        depth_updates = {}
        for event, _ in events_and_contexts:
            if event.internal_metadata.is_outlier():
                continue
            depth_updates[event.room_id] = max(
                event.depth, depth_updates.get(event.room_id, event.depth))

        for room_id, depth in depth_updates.items():
            self._update_min_depth_for_room_txn(txn, room_id, depth)

        txn.execute(
            "SELECT event_id, outlier FROM events WHERE event_id in (%s)" %
            (",".join(["?"] * len(events_and_contexts)), ),
            [event.event_id for event, _ in events_and_contexts])
        have_persisted = {
            event_id: outlier
            for event_id, outlier in txn.fetchall()
        }

        event_map = {}
        to_remove = set()
        for event, context in events_and_contexts:
            # Handle the case of the list including the same event multiple
            # times. The tricky thing here is when they differ by whether
            # they are an outlier.
            if event.event_id in event_map:
                other = event_map[event.event_id]

                if not other.internal_metadata.is_outlier():
                    to_remove.add(event)
                    continue
                elif not event.internal_metadata.is_outlier():
                    to_remove.add(event)
                    continue
                else:
                    to_remove.add(other)

            event_map[event.event_id] = event

            if event.event_id not in have_persisted:
                continue

            to_remove.add(event)

            outlier_persisted = have_persisted[event.event_id]
            if not event.internal_metadata.is_outlier() and outlier_persisted:
                self._store_state_groups_txn(
                    txn,
                    event,
                    context,
                )

                metadata_json = encode_json(
                    event.internal_metadata.get_dict(),
                    using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8")

                sql = ("UPDATE event_json SET internal_metadata = ?"
                       " WHERE event_id = ?")
                txn.execute(sql, (
                    metadata_json,
                    event.event_id,
                ))

                sql = "UPDATE events SET outlier = ? WHERE event_id = ?"
                txn.execute(sql, (
                    False,
                    event.event_id,
                ))

                self._update_extremeties(txn, [event])

        events_and_contexts = [ec for ec in events_and_contexts
                               if ec[0] not in to_remove]

        if not events_and_contexts:
            return

        self._store_mult_state_groups_txn(
            txn, [(event, context) for event, context in events_and_contexts
                  if not event.internal_metadata.is_outlier()])

        self._handle_mult_prev_events(
            txn,
            events=[event for event, _ in events_and_contexts],
        )

        for event, _ in events_and_contexts:
            if event.type == EventTypes.Name:
                self._store_room_name_txn(txn, event)
            elif event.type == EventTypes.Topic:
                self._store_room_topic_txn(txn, event)
            elif event.type == EventTypes.Redaction:
                self._store_redaction(txn, event)

        self._store_room_members_txn(txn, [
            event for event, _ in events_and_contexts
            if event.type == EventTypes.Member
        ])

        def event_dict(event):
            return {
                k: v
                for k, v in event.get_dict().items() if k not in [
                    "redacted",
                    "redacted_because",
                ]
            }

        self._simple_insert_many_txn(
            txn,
            table="event_json",
            values=[{
                "event_id":
                event.event_id,
                "room_id":
                event.room_id,
                "internal_metadata":
                encode_json(
                    event.internal_metadata.get_dict(),
                    using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8"),
                "json":
                encode_json(
                    event_dict(event),
                    using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8"),
            } for event, _ in events_and_contexts],
        )

        self._simple_insert_many_txn(
            txn,
            table="events",
            values=[{
                "stream_ordering":
                event.internal_metadata.stream_ordering,
                "topological_ordering":
                event.depth,
                "depth":
                event.depth,
                "event_id":
                event.event_id,
                "room_id":
                event.room_id,
                "type":
                event.type,
                "processed":
                True,
                "outlier":
                event.internal_metadata.is_outlier(),
                "content":
                encode_json(
                    event.content,
                    using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8"),
            } for event, _ in events_and_contexts],
        )

        for event, context in events_and_contexts:
            if context.rejected:
                self._store_rejections_txn(txn, event.event_id,
                                           context.rejected)

        self._simple_insert_many_txn(
            txn,
            table="event_auth",
            values=[{
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            } for event, _ in events_and_contexts
                    for auth_id, _ in event.auth_events],
        )

        self._store_event_reference_hashes_txn(
            txn, [event for event, _ in events_and_contexts])

        state_events_and_contexts = [
            ec for ec in events_and_contexts if ec[0].is_state()
        ]

        state_values = []
        for event, context in state_events_and_contexts:
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            # TODO: How does this work with backfilling?
            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            state_values.append(vals)

        self._simple_insert_many_txn(
            txn,
            table="state_events",
            values=state_values,
        )

        self._simple_insert_many_txn(
            txn,
            table="event_edges",
            values=[{
                "event_id": event.event_id,
                "prev_event_id": prev_id,
                "room_id": event.room_id,
                "is_state": True,
            } for event, _ in state_events_and_contexts
                    for prev_id, _ in event.prev_state],
        )

        if is_new_state:
            for event, context in state_events_and_contexts:
                if not context.rejected:
                    txn.call_after(self.get_current_state_for_key.invalidate, (
                        event.room_id,
                        event.type,
                        event.state_key,
                    ))

                    if event.type in [EventTypes.Name, EventTypes.Aliases]:
                        txn.call_after(
                            self.get_room_name_and_aliases.invalidate,
                            (event.room_id, ))

                    self._simple_upsert_txn(txn,
                                            "current_state_events",
                                            keyvalues={
                                                "room_id": event.room_id,
                                                "type": event.type,
                                                "state_key": event.state_key,
                                            },
                                            values={
                                                "event_id": event.event_id,
                                            })

        return