Example #1
def get_server_keys(server_name, target, port):
    url = "https://%s:%i/_matrix/key/v1" % (target, port)
    keys = json.load(urllib2.urlopen(url))
    verify_keys = {}
    for key_id, key_base64 in keys["verify_keys"].items():
        verify_key = decode_verify_key_bytes(key_id, decode_base64(key_base64))
        verify_signed_json(keys, server_name, verify_key)
        verify_keys[key_id] = verify_key
    return verify_keys
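A minimal usage sketch for the helper above; the hostname and port are illustrative, and json, urllib2 and the signedjson-style helpers (decode_base64, decode_verify_key_bytes, verify_signed_json) are assumed to be imported:

verify_keys = get_server_keys("matrix.org", "matrix.org", 8448)
# verify_keys maps key ids such as "ed25519:auto" to VerifyKey objects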
Example #2
    def read_perspectives(self, perspectives_config):
        servers = {}
        for server_name, server_config in perspectives_config["servers"].items():
            for key_id, key_data in server_config["verify_keys"].items():
                if is_signing_algorithm_supported(key_id):
                    key_base64 = key_data["key"]
                    key_bytes = decode_base64(key_base64)
                    verify_key = decode_verify_key_bytes(key_id, key_bytes)
                    servers.setdefault(server_name, {})[key_id] = verify_key
        return servers
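A sketch of the perspectives_config shape that read_perspectives expects; the server name and key are illustrative (the key shown is the ed25519:auto key that Synapse's sample config historically listed for matrix.org):

perspectives_config = {
    "servers": {
        "matrix.org": {
            "verify_keys": {
                "ed25519:auto": {
                    "key": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw",
                },
            },
        },
    },
}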
Example #3
    def read_perspectives(self, perspectives_config):
        servers = {}
        for server_name, server_config in perspectives_config["servers"].items():
            for key_id, key_data in server_config["verify_keys"].items():
                if is_signing_algorithm_supported(key_id):
                    key_base64 = key_data["key"]
                    key_bytes = decode_base64(key_base64)
                    verify_key = decode_verify_key_bytes(key_id, key_bytes)
                    servers.setdefault(server_name, {})[key_id] = verify_key
        return servers
Example #4
def decode_verify_key_base64(algorithm, version, key_base64):
    """Decode a base64 encoded verify key
    Args:
        algorithm (str): The algorithm the key is for (currently "ed25519").
        version (str): Identifies this key out of the keys for this entity.
        key_base64 (str): Base64 encoded bytes of the key.
    Returns:
        A VerifyKey object.
    """
    key_id = "%s:%s" % (algorithm, version)
    key_bytes = decode_base64(key_base64)
    return decode_verify_key_bytes(key_id, key_bytes)
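A short round-trip sketch for decode_verify_key_base64, assuming nacl and encode_base64 (from unpaddedbase64, as in Synapse) are available:

import nacl.signing
from unpaddedbase64 import encode_base64

signing_key = nacl.signing.SigningKey.generate()
key_base64 = encode_base64(signing_key.verify_key.encode())
verify_key = decode_verify_key_base64("ed25519", "v1", key_base64)
# verify_key carries .alg and .version (set by decode_verify_key_bytes)
# and can be passed to verify_signed_json below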
Example #5
    def read_old_signing_keys(self, old_signing_keys):
        keys = {}
        for key_id, key_data in old_signing_keys.items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.expired_ts = key_data["expired_ts"]
                keys[key_id] = verify_key
            else:
                raise ConfigError(
                    "Unsupported signing algorithm for old key: %r" % (key_id,)
                )
        return keys
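A sketch of the old_signing_keys mapping that read_old_signing_keys consumes; the key id and expiry timestamp are illustrative:

import nacl.signing
from unpaddedbase64 import encode_base64

old_signing_keys = {
    "ed25519:old": {
        "key": encode_base64(
            nacl.signing.SigningKey.generate().verify_key.encode()),
        "expired_ts": 1404075102356,  # milliseconds since the epoch
    },
}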
Example #6
    def read_old_signing_keys(self, old_signing_keys):
        keys = {}
        for key_id, key_data in old_signing_keys.items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.expired_ts = key_data["expired_ts"]
                keys[key_id] = verify_key
            else:
                raise ConfigError(
                    "Unsupported signing algorithm for old key: %r" %
                    (key_id,))
        return keys
Example #7
def decode_signing_key_base64(algorithm, version, key_base64):
    """Decode a base64 encoded signing key
    Args:
        algorithm (str): The algorithm the key is for (currently "ed25519").
        version (str): Identifies this key out of the keys for this entity.
        key_base64 (str): Base64 encoded bytes of the key.
    Returns:
        A SigningKey object.
    """
    if algorithm == NACL_ED25519:
        key_bytes = decode_base64(key_base64)
        key = nacl.signing.SigningKey(key_bytes)
        key.version = version
        key.alg = NACL_ED25519
        return key
    else:
        raise ValueError("Unsupported algorithm %s" % (algorithm,))
Example #8
def verify_signed_json(json_object, signature_name, verify_key):
    """Check a signature on a signed JSON object.

    Args:
        json_object (dict): The signed JSON object to check.
        signature_name (str): The name of the signature to check.
        verify_key (syutil.crypto.VerifyKey): The key to verify the signature.

    Raises:
        SignatureVerifyException: If the signature isn't valid.
    """

    try:
        signatures = json_object["signatures"]
    except KeyError:
        raise SignatureVerifyException("No signatures on this object")

    key_id = "%s:%s" % (verify_key.alg, verify_key.version)

    try:
        signature_b64 = signatures[signature_name][key_id]
    except KeyError:
        raise SignatureVerifyException("Missing signature for %s, %s" %
                                       (signature_name, key_id))

    try:
        signature = decode_base64(signature_b64)
    except Exception:
        raise SignatureVerifyException("Invalid signature base64 for %s, %s" %
                                       (signature_name, key_id))

    json_object_copy = dict(json_object)
    del json_object_copy["signatures"]
    json_object_copy.pop("unsigned", None)

    message = encode_canonical_json(json_object_copy)

    #logger.debug("VERIFY: %s %s %s", signature_name, key_id, message)

    try:
        verify_key.verify(message, signature)
    except Exception:
        logger.exception("Error verifying signature")
        raise SignatureVerifyException("Unable to verify signature for %s " %
                                       signature_name)
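A sign-and-verify round trip for verify_signed_json, sketched under the assumption that sign_json comes from the same module family (signedjson.sign in Synapse's lineage) and that the key helpers shown earlier are in scope:

import nacl.signing
from unpaddedbase64 import encode_base64

seed_base64 = encode_base64(nacl.signing.SigningKey.generate().encode())
signing_key = decode_signing_key_base64("ed25519", "v1", seed_base64)

signed = sign_json({"foo": "bar"}, "example.org", signing_key)

verify_key = signing_key.verify_key
verify_key.alg = signing_key.alg          # verify_signed_json reads .alg
verify_key.version = signing_key.version  # and .version to build the key id
verify_signed_json(signed, "example.org", verify_key)  # raises on failure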
Example #9
    def _async_render_GET(self, request):
        try:
            server_keys, certificate = yield fetch_server_key(
                self.server_name,
                self.key_server.ssl_context_factory
            )

            resp_server_name = server_keys[u"server_name"]
            verify_key_b64 = server_keys[u"signature_verify_key"]
            tls_certificate_b64 = server_keys[u"tls_certificate"]
            verify_key = VerifyKey(decode_base64(verify_key_b64))

            if resp_server_name != self.server_name:
                raise ValueError("Wrong server name '%s' != '%s'" %
                                 (resp_server_name, self.server_name))

            x509_certificate_bytes = crypto.dump_certificate(
                crypto.FILETYPE_ASN1,
                certificate
            )

            if encode_base64(x509_certificate_bytes) != tls_certificate_b64:
                raise ValueError("TLS certificate doesn't match")

            verify_signed_json(server_keys, self.server_name, verify_key)

            signed_json = sign_json(
                server_keys,
                self.key_server.server_name,
                self.key_server.signing_key
            )

            json_bytes = encode_canonical_json(signed_json)
            respond_with_json_bytes(request, 200, json_bytes)

        except Exception as e:
            json_bytes = encode_canonical_json({
                u"error": {u"code": 502, u"message": e.message}
            })
            respond_with_json_bytes(request, 502, json_bytes)
Example #10
def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
    """Check whether the hash for this PDU matches the contents"""
    computed_hash = _compute_content_hash(event, hash_algorithm)
    logger.debug("Expecting hash: %s", encode_base64(computed_hash.digest()))
    if computed_hash.name not in event.hashes:
        raise SynapseError(
            400,
            "Algorithm %s not in hashes %s" % (
                computed_hash.name, list(event.hashes),
            ),
            Codes.UNAUTHORIZED,
        )
    message_hash_base64 = event.hashes[computed_hash.name]
    try:
        message_hash_bytes = decode_base64(message_hash_base64)
    except Exception:
        raise SynapseError(
            400,
            "Invalid base64: %s" % (message_hash_base64,),
            Codes.UNAUTHORIZED,
        )
    return message_hash_bytes == computed_hash.digest()
Example #11
def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
    """Check whether the hash for this PDU matches the contents"""
    name, expected_hash = compute_content_hash(event, hash_algorithm)
    logger.debug("Expecting hash: %s", encode_base64(expected_hash))
    if name not in event.hashes:
        raise SynapseError(
            400,
            "Algorithm %s not in hashes %s" % (
                name,
                list(event.hashes),
            ),
            Codes.UNAUTHORIZED,
        )
    message_hash_base64 = event.hashes[name]
    try:
        message_hash_bytes = decode_base64(message_hash_base64)
    except Exception:
        raise SynapseError(
            400,
            "Invalid base64: %s" % (message_hash_base64, ),
            Codes.UNAUTHORIZED,
        )
    return message_hash_bytes == expected_hash
Example #12
def select_pdus(cursor):
    cursor.execute("SELECT pdu_id, origin FROM pdus ORDER BY depth ASC")

    ids = cursor.fetchall()

    pdu_tuples = store._get_pdu_tuples(cursor, ids)

    pdus = [Pdu.from_pdu_tuple(p) for p in pdu_tuples]

    reference_hashes = {}

    for pdu in pdus:
        try:
            if pdu.prev_pdus:
                print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
                for pdu_id, origin, hashes in pdu.prev_pdus:
                    ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
                    hashes[ref_alg] = encode_base64(ref_hsh)
                    store._store_prev_pdu_hash_txn(cursor, pdu.pdu_id,
                                                   pdu.origin, pdu_id, origin,
                                                   ref_alg, ref_hsh)
                print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
            pdu = add_event_pdu_content_hash(pdu)
            ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
            reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
            store._store_pdu_reference_hash_txn(cursor, pdu.pdu_id, pdu.origin,
                                                ref_alg, ref_hsh)

            for alg, hsh_base64 in pdu.hashes.items():
                print alg, hsh_base64
                store._store_pdu_content_hash_txn(cursor, pdu.pdu_id,
                                                  pdu.origin, alg,
                                                  decode_base64(hsh_base64))

        except Exception:
            print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus
Example #13
    def _persist_event_txn(self,
                           txn,
                           event,
                           context,
                           backfilled,
                           stream_ordering=None,
                           is_new_state=True,
                           current_state=None):

        # Remove any existing cache entries for the event_id
        txn.call_after(self._invalidate_get_event_cache, event.event_id)

        if stream_ordering is None:
            with self._stream_id_gen.get_next_txn(txn) as stream_ordering:
                return self._persist_event_txn(
                    txn,
                    event,
                    context,
                    backfilled,
                    stream_ordering=stream_ordering,
                    is_new_state=is_new_state,
                    current_state=current_state,
                )

        # We purposefully do this first since if we include a `current_state`
        # key, we *want* to update the `current_state_events` table
        if current_state:
            self._simple_delete_txn(
                txn,
                table="current_state_events",
                keyvalues={"room_id": event.room_id},
            )

            for s in current_state:
                if s.type == EventTypes.Member:
                    txn.call_after(self.get_rooms_for_user.invalidate,
                                   s.state_key)
                    txn.call_after(self.get_joined_hosts_for_room.invalidate,
                                   s.room_id)
                self._simple_insert_txn(
                    txn, "current_state_events", {
                        "event_id": s.event_id,
                        "room_id": s.room_id,
                        "type": s.type,
                        "state_key": s.state_key,
                    })

        outlier = event.internal_metadata.is_outlier()

        if not outlier:
            self._store_state_groups_txn(txn, event, context)

            self._update_min_depth_for_room_txn(txn, event.room_id,
                                                event.depth)

        have_persisted = self._simple_select_one_onecol_txn(
            txn,
            table="event_json",
            keyvalues={"event_id": event.event_id},
            retcol="event_id",
            allow_none=True,
        )

        metadata_json = encode_canonical_json(
            event.internal_metadata.get_dict()).decode("UTF-8")

        # If we have already persisted this event, we don't need to do any
        # more processing.
        # The processing above must be done on every call to persist event,
        # since they might not have happened on previous calls. For example,
        # if we are persisting an event that we had persisted as an outlier,
        # but is no longer one.
        if have_persisted:
            if not outlier:
                sql = ("UPDATE event_json SET internal_metadata = ?"
                       " WHERE event_id = ?")
                txn.execute(sql, (
                    metadata_json,
                    event.event_id,
                ))

                sql = ("UPDATE events SET outlier = ?" " WHERE event_id = ?")
                txn.execute(sql, (
                    False,
                    event.event_id,
                ))
            return

        self._handle_prev_events(
            txn,
            outlier=outlier,
            event_id=event.event_id,
            prev_events=event.prev_events,
            room_id=event.room_id,
        )

        if event.type == EventTypes.Member:
            self._store_room_member_txn(txn, event)
        elif event.type == EventTypes.Name:
            self._store_room_name_txn(txn, event)
        elif event.type == EventTypes.Topic:
            self._store_room_topic_txn(txn, event)
        elif event.type == EventTypes.Redaction:
            self._store_redaction(txn, event)

        event_dict = {
            k: v
            for k, v in event.get_dict().items() if k not in [
                "redacted",
                "redacted_because",
            ]
        }

        self._simple_insert_txn(
            txn,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json,
                "json": encode_canonical_json(event_dict).decode("UTF-8"),
            },
        )

        content = encode_canonical_json(event.content).decode("UTF-8")

        vals = {
            "topological_ordering": event.depth,
            "event_id": event.event_id,
            "type": event.type,
            "room_id": event.room_id,
            "content": content,
            "processed": True,
            "outlier": outlier,
            "depth": event.depth,
        }

        unrec = {
            k: v
            for k, v in event.get_dict().items()
            if k not in vals.keys() and k not in [
                "redacted",
                "redacted_because",
                "signatures",
                "hashes",
                "prev_events",
            ]
        }

        vals["unrecognized_keys"] = encode_canonical_json(unrec).decode(
            "UTF-8")

        sql = ("INSERT INTO events"
               " (stream_ordering, topological_ordering, event_id, type,"
               " room_id, content, processed, outlier, depth)"
               " VALUES (?,?,?,?,?,?,?,?,?)")

        txn.execute(sql,
                    (stream_ordering, event.depth, event.event_id, event.type,
                     event.room_id, content, True, outlier, event.depth))

        if context.rejected:
            self._store_rejections_txn(txn, event.event_id, context.rejected)

        for hash_alg, hash_base64 in event.hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_event_content_hash_txn(
                txn,
                event.event_id,
                hash_alg,
                hash_bytes,
            )

        for prev_event_id, prev_hashes in event.prev_events:
            for alg, hash_base64 in prev_hashes.items():
                hash_bytes = decode_base64(hash_base64)
                self._store_prev_event_hash_txn(txn, event.event_id,
                                                prev_event_id, alg, hash_bytes)

        self._simple_insert_many_txn(
            txn,
            table="event_auth",
            values=[{
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            } for auth_id, _ in event.auth_events],
        )

        (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
        self._store_event_reference_hash_txn(txn, event.event_id, ref_alg,
                                             ref_hash_bytes)

        if event.is_state():
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            # TODO: How does this work with backfilling?
            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            self._simple_insert_txn(
                txn,
                "state_events",
                vals,
            )

            self._simple_insert_many_txn(
                txn,
                table="event_edges",
                values=[{
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": True,
                } for e_id, h in event.prev_state],
            )

            if is_new_state and not context.rejected:
                self._simple_upsert_txn(txn,
                                        "current_state_events",
                                        keyvalues={
                                            "room_id": event.room_id,
                                            "type": event.type,
                                            "state_key": event.state_key,
                                        },
                                        values={
                                            "event_id": event.event_id,
                                        })

        return
Example #14
    def process_v2_response(self,
                            from_server,
                            response_json,
                            requested_ids=[]):
        time_now_ms = self.clock.time_msec()
        response_keys = {}
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.time_added = time_now_ms
                verify_keys[key_id] = verify_key

        old_verify_keys = {}
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.expired = key_data["expired_ts"]
                verify_key.time_added = time_now_ms
                old_verify_keys[key_id] = verify_key

        results = {}
        server_name = response_json["server_name"]
        for key_id in response_json["signatures"].get(server_name, {}):
            if key_id not in response_json["verify_keys"]:
                raise ValueError(
                    "Key response must include verification keys for all"
                    " signatures")
            if key_id in verify_keys:
                verify_signed_json(response_json, server_name,
                                   verify_keys[key_id])

        signed_key_json = sign_json(
            response_json,
            self.config.server_name,
            self.config.signing_key[0],
        )

        signed_key_json_bytes = encode_canonical_json(signed_key_json)
        ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

        updated_key_ids = set(requested_ids)
        updated_key_ids.update(verify_keys)
        updated_key_ids.update(old_verify_keys)

        response_keys.update(verify_keys)
        response_keys.update(old_verify_keys)

        yield defer.gatherResults(
            [
                self.store.store_server_keys_json(
                    server_name=server_name,
                    key_id=key_id,
                    from_server=server_name,
                    ts_now_ms=time_now_ms,
                    ts_expires_ms=ts_valid_until_ms,
                    key_json_bytes=signed_key_json_bytes,
                ) for key_id in updated_key_ids
            ],
            consumeErrors=True,
        ).addErrback(unwrapFirstError)

        results[server_name] = response_keys

        defer.returnValue(results)
Example #15
    def _persist_event_txn(self, txn, event, context, backfilled,
                           stream_ordering=None, is_new_state=True,
                           current_state=None):

        # Remove any existing cache entries for the event_id
        self._get_event_cache.pop(event.event_id)

        # We purposefully do this first since if we include a `current_state`
        # key, we *want* to update the `current_state_events` table
        if current_state:
            txn.execute(
                "DELETE FROM current_state_events WHERE room_id = ?",
                (event.room_id,)
            )

            for s in current_state:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {
                        "event_id": s.event_id,
                        "room_id": s.room_id,
                        "type": s.type,
                        "state_key": s.state_key,
                    },
                    or_replace=True,
                )

        if event.is_state() and is_new_state:
            if not backfilled and not context.rejected:
                self._simple_insert_txn(
                    txn,
                    table="state_forward_extremities",
                    values={
                        "event_id": event.event_id,
                        "room_id": event.room_id,
                        "type": event.type,
                        "state_key": event.state_key,
                    },
                    or_replace=True,
                )

                for prev_state_id, _ in event.prev_state:
                    self._simple_delete_txn(
                        txn,
                        table="state_forward_extremities",
                        keyvalues={
                            "event_id": prev_state_id,
                        }
                    )

        outlier = event.internal_metadata.is_outlier()

        if not outlier:
            self._store_state_groups_txn(txn, event, context)

            self._update_min_depth_for_room_txn(
                txn,
                event.room_id,
                event.depth
            )

        self._handle_prev_events(
            txn,
            outlier=outlier,
            event_id=event.event_id,
            prev_events=event.prev_events,
            room_id=event.room_id,
        )

        have_persisted = self._simple_select_one_onecol_txn(
            txn,
            table="event_json",
            keyvalues={"event_id": event.event_id},
            retcol="event_id",
            allow_none=True,
        )

        metadata_json = encode_canonical_json(
            event.internal_metadata.get_dict()
        )

        # If we have already persisted this event, we don't need to do any
        # more processing.
        # The processing above must be done on every call to persist event,
        # since they might not have happened on previous calls. For example,
        # if we are persisting an event that we had persisted as an outlier,
        # but is no longer one.
        if have_persisted:
            if not outlier:
                sql = (
                    "UPDATE event_json SET internal_metadata = ?"
                    " WHERE event_id = ?"
                )
                txn.execute(
                    sql,
                    (metadata_json.decode("UTF-8"), event.event_id,)
                )

                sql = (
                    "UPDATE events SET outlier = 0"
                    " WHERE event_id = ?"
                )
                txn.execute(
                    sql,
                    (event.event_id,)
                )
            return

        if event.type == EventTypes.Member:
            self._store_room_member_txn(txn, event)
        elif event.type == EventTypes.Feedback:
            self._store_feedback_txn(txn, event)
        elif event.type == EventTypes.Name:
            self._store_room_name_txn(txn, event)
        elif event.type == EventTypes.Topic:
            self._store_room_topic_txn(txn, event)
        elif event.type == EventTypes.Redaction:
            self._store_redaction(txn, event)

        event_dict = {
            k: v
            for k, v in event.get_dict().items()
            if k not in [
                "redacted",
                "redacted_because",
            ]
        }

        self._simple_insert_txn(
            txn,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json.decode("UTF-8"),
                "json": encode_canonical_json(event_dict).decode("UTF-8"),
            },
            or_replace=True,
        )

        content = encode_canonical_json(
            event.content
        ).decode("UTF-8")

        vals = {
            "topological_ordering": event.depth,
            "event_id": event.event_id,
            "type": event.type,
            "room_id": event.room_id,
            "content": content,
            "processed": True,
            "outlier": outlier,
            "depth": event.depth,
        }

        if stream_ordering is not None:
            vals["stream_ordering"] = stream_ordering

        unrec = {
            k: v
            for k, v in event.get_dict().items()
            if k not in vals.keys() and k not in [
                "redacted",
                "redacted_because",
                "signatures",
                "hashes",
                "prev_events",
            ]
        }

        vals["unrecognized_keys"] = encode_canonical_json(
            unrec
        ).decode("UTF-8")

        try:
            self._simple_insert_txn(
                txn,
                "events",
                vals,
                or_replace=(not outlier),
                or_ignore=bool(outlier),
            )
        except Exception:
            logger.warn(
                "Failed to persist, probably duplicate: %s",
                event.event_id,
                exc_info=True,
            )
            raise _RollbackButIsFineException("_persist_event")

        if context.rejected:
            self._store_rejections_txn(txn, event.event_id, context.rejected)

        if event.is_state():
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            # TODO: How does this work with backfilling?
            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            self._simple_insert_txn(
                txn,
                "state_events",
                vals,
                or_replace=True,
            )

            if is_new_state and not context.rejected:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {
                        "event_id": event.event_id,
                        "room_id": event.room_id,
                        "type": event.type,
                        "state_key": event.state_key,
                    },
                    or_replace=True,
                )

            for e_id, h in event.prev_state:
                self._simple_insert_txn(
                    txn,
                    table="event_edges",
                    values={
                        "event_id": event.event_id,
                        "prev_event_id": e_id,
                        "room_id": event.room_id,
                        "is_state": 1,
                    },
                    or_ignore=True,
                )

        for hash_alg, hash_base64 in event.hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_event_content_hash_txn(
                txn, event.event_id, hash_alg, hash_bytes,
            )

        for prev_event_id, prev_hashes in event.prev_events:
            for alg, hash_base64 in prev_hashes.items():
                hash_bytes = decode_base64(hash_base64)
                self._store_prev_event_hash_txn(
                    txn, event.event_id, prev_event_id, alg, hash_bytes
                )

        for auth_id, _ in event.auth_events:
            self._simple_insert_txn(
                txn,
                table="event_auth",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "auth_id": auth_id,
                },
                or_ignore=True,
            )

        (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
        self._store_event_reference_hash_txn(
            txn, event.event_id, ref_alg, ref_hash_bytes
        )
Example #16
    def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None,
                           is_new_state=True, current_state=None):
        if event.type == RoomMemberEvent.TYPE:
            self._store_room_member_txn(txn, event)
        elif event.type == FeedbackEvent.TYPE:
            self._store_feedback_txn(txn, event)
        elif event.type == RoomNameEvent.TYPE:
            self._store_room_name_txn(txn, event)
        elif event.type == RoomTopicEvent.TYPE:
            self._store_room_topic_txn(txn, event)
        elif event.type == RoomRedactionEvent.TYPE:
            self._store_redaction(txn, event)

        outlier = False
        if hasattr(event, "outlier"):
            outlier = event.outlier

        vals = {
            "topological_ordering": event.depth,
            "event_id": event.event_id,
            "type": event.type,
            "room_id": event.room_id,
            "content": json.dumps(event.content),
            "processed": True,
            "outlier": outlier,
            "depth": event.depth,
        }

        if stream_ordering is not None:
            vals["stream_ordering"] = stream_ordering

        unrec = {
            k: v
            for k, v in event.get_full_dict().items()
            if k not in vals.keys() and k not in [
                "redacted",
                "redacted_because",
                "signatures",
                "hashes",
                "prev_events",
            ]
        }
        vals["unrecognized_keys"] = json.dumps(unrec)

        try:
            self._simple_insert_txn(
                txn,
                "events",
                vals,
                or_replace=(not outlier),
                or_ignore=bool(outlier),
            )
        except Exception:
            logger.warn(
                "Failed to persist, probably duplicate: %s",
                event.event_id,
                exc_info=True,
            )
            raise _RollbackButIsFineException("_persist_event")

        self._handle_prev_events(
            txn,
            outlier=outlier,
            event_id=event.event_id,
            prev_events=event.prev_events,
            room_id=event.room_id,
        )

        self._store_state_groups_txn(txn, event)

        if current_state:
            txn.execute(
                "DELETE FROM current_state_events WHERE room_id = ?",
                (event.room_id,)
            )

            for s in current_state:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {
                        "event_id": s.event_id,
                        "room_id": s.room_id,
                        "type": s.type,
                        "state_key": s.state_key,
                    },
                    or_replace=True,
                )

        is_state = hasattr(event, "state_key") and event.state_key is not None
        if is_state:
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            self._simple_insert_txn(
                txn,
                "state_events",
                vals,
                or_replace=True,
            )

            if is_new_state:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {
                        "event_id": event.event_id,
                        "room_id": event.room_id,
                        "type": event.type,
                        "state_key": event.state_key,
                    },
                    or_replace=True,
                )

            for e_id, h in event.prev_state:
                self._simple_insert_txn(
                    txn,
                    table="event_edges",
                    values={
                        "event_id": event.event_id,
                        "prev_event_id": e_id,
                        "room_id": event.room_id,
                        "is_state": 1,
                    },
                    or_ignore=True,
                )

            if not backfilled:
                self._simple_insert_txn(
                    txn,
                    table="state_forward_extremities",
                    values={
                        "event_id": event.event_id,
                        "room_id": event.room_id,
                        "type": event.type,
                        "state_key": event.state_key,
                    },
                    or_replace=True,
                )

                for prev_state_id, _ in event.prev_state:
                    self._simple_delete_txn(
                        txn,
                        table="state_forward_extremities",
                        keyvalues={
                            "event_id": prev_state_id,
                        }
                    )

        for hash_alg, hash_base64 in event.hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_event_content_hash_txn(
                txn, event.event_id, hash_alg, hash_bytes,
            )

        if hasattr(event, "signatures"):
            logger.debug("sigs: %s", event.signatures)
            for name, sigs in event.signatures.items():
                for key_id, signature_base64 in sigs.items():
                    signature_bytes = decode_base64(signature_base64)
                    self._store_event_signature_txn(
                        txn, event.event_id, name, key_id,
                        signature_bytes,
                    )

        for prev_event_id, prev_hashes in event.prev_events:
            for alg, hash_base64 in prev_hashes.items():
                hash_bytes = decode_base64(hash_base64)
                self._store_prev_event_hash_txn(
                    txn, event.event_id, prev_event_id, alg, hash_bytes
                )

        for auth_id, _ in event.auth_events:
            self._simple_insert_txn(
                txn,
                table="event_auth",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "auth_id": auth_id,
                },
                or_ignore=True,
            )

        (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
        self._store_event_reference_hash_txn(
            txn, event.event_id, ref_alg, ref_hash_bytes
        )

        if not outlier:
            self._update_min_depth_for_room_txn(
                txn,
                event.room_id,
                event.depth
            )
Example #17
    def get_server_verify_key(self, server_name, key_ids):
        """Finds a verification key for the server with one of the key ids.
        Args:
            server_name (str): The name of the server to fetch a key for.
            key_ids (list of str): The key_ids to check for.
        """

        # Check the datastore to see if we have one cached.
        cached = yield self.store.get_server_verify_keys(server_name, key_ids)

        if cached:
            defer.returnValue(cached[0])
            return

        # Try to fetch the key from the remote server.

        limiter = yield get_retry_limiter(
            server_name,
            self.clock,
            self.store,
        )

        with limiter:
            (response, tls_certificate) = yield fetch_server_key(
                server_name, self.hs.tls_context_factory
            )

        # Check the response.

        x509_certificate_bytes = crypto.dump_certificate(
            crypto.FILETYPE_ASN1, tls_certificate
        )

        if ("signatures" not in response
                or server_name not in response["signatures"]):
            raise ValueError("Key response not signed by remote server")

        if "tls_certificate" not in response:
            raise ValueError("Key response missing TLS certificate")

        tls_certificate_b64 = response["tls_certificate"]

        if encode_base64(x509_certificate_bytes) != tls_certificate_b64:
            raise ValueError("TLS certificate doesn't match")

        verify_keys = {}
        for key_id, key_base64 in response["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_keys[key_id] = verify_key

        for key_id in response["signatures"][server_name]:
            if key_id not in response["verify_keys"]:
                raise ValueError(
                    "Key response must include verification keys for all"
                    " signatures"
                )
            if key_id in verify_keys:
                verify_signed_json(
                    response,
                    server_name,
                    verify_keys[key_id]
                )

        # Cache the result in the datastore.

        time_now_ms = self.clock.time_msec()

        yield self.store.store_server_certificate(
            server_name,
            server_name,
            time_now_ms,
            tls_certificate,
        )

        for key_id, key in verify_keys.items():
            yield self.store.store_server_verify_key(
                server_name, server_name, time_now_ms, key
            )

        for key_id in key_ids:
            if key_id in verify_keys:
                defer.returnValue(verify_keys[key_id])
                return

        raise ValueError("No verification key found for given key ids")
Example #18
    def get_server_verify_key_v1_direct(self, server_name, key_ids):
        """Finds a verification key for the server with one of the key ids.
        Args:
            server_name (str): The name of the server to fetch a key for.
            key_ids (list of str): The key_ids to check for.
        """

        # Try to fetch the key from the remote server.

        (response,
         tls_certificate) = yield fetch_server_key(server_name,
                                                   self.hs.tls_context_factory)

        # Check the response.

        x509_certificate_bytes = crypto.dump_certificate(
            crypto.FILETYPE_ASN1, tls_certificate)

        if ("signatures" not in response
                or server_name not in response["signatures"]):
            raise ValueError("Key response not signed by remote server")

        if "tls_certificate" not in response:
            raise ValueError("Key response missing TLS certificate")

        tls_certificate_b64 = response["tls_certificate"]

        if encode_base64(x509_certificate_bytes) != tls_certificate_b64:
            raise ValueError("TLS certificate doesn't match")

        # Cache the result in the datastore.

        time_now_ms = self.clock.time_msec()

        verify_keys = {}
        for key_id, key_base64 in response["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.time_added = time_now_ms
                verify_keys[key_id] = verify_key

        for key_id in response["signatures"][server_name]:
            if key_id not in response["verify_keys"]:
                raise ValueError(
                    "Key response must include verification keys for all"
                    " signatures")
            if key_id in verify_keys:
                verify_signed_json(response, server_name, verify_keys[key_id])

        yield self.store.store_server_certificate(
            server_name,
            server_name,
            time_now_ms,
            tls_certificate,
        )

        yield self.store_keys(
            server_name=server_name,
            from_server=server_name,
            verify_keys=verify_keys,
        )

        defer.returnValue(verify_keys)
Example #19
    def process_v2_response(self, from_server, response_json,
                            requested_ids=[]):
        time_now_ms = self.clock.time_msec()
        response_keys = {}
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.time_added = time_now_ms
                verify_keys[key_id] = verify_key

        old_verify_keys = {}
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.expired = key_data["expired_ts"]
                verify_key.time_added = time_now_ms
                old_verify_keys[key_id] = verify_key

        results = {}
        server_name = response_json["server_name"]
        for key_id in response_json["signatures"].get(server_name, {}):
            if key_id not in response_json["verify_keys"]:
                raise ValueError(
                    "Key response must include verification keys for all"
                    " signatures"
                )
            if key_id in verify_keys:
                verify_signed_json(
                    response_json,
                    server_name,
                    verify_keys[key_id]
                )

        signed_key_json = sign_json(
            response_json,
            self.config.server_name,
            self.config.signing_key[0],
        )

        signed_key_json_bytes = encode_canonical_json(signed_key_json)
        ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

        updated_key_ids = set(requested_ids)
        updated_key_ids.update(verify_keys)
        updated_key_ids.update(old_verify_keys)

        response_keys.update(verify_keys)
        response_keys.update(old_verify_keys)

        yield defer.gatherResults(
            [
                self.store.store_server_keys_json(
                    server_name=server_name,
                    key_id=key_id,
                    from_server=server_name,
                    ts_now_ms=time_now_ms,
                    ts_expires_ms=ts_valid_until_ms,
                    key_json_bytes=signed_key_json_bytes,
                )
                for key_id in updated_key_ids
            ],
            consumeErrors=True,
        ).addErrback(unwrapFirstError)

        results[server_name] = response_keys

        defer.returnValue(results)
Example #20
    def _persist_event_txn(self,
                           txn,
                           event,
                           context,
                           backfilled,
                           stream_ordering=None,
                           is_new_state=True,
                           current_state=None):
        if event.type == EventTypes.Member:
            self._store_room_member_txn(txn, event)
        elif event.type == EventTypes.Feedback:
            self._store_feedback_txn(txn, event)
        elif event.type == EventTypes.Name:
            self._store_room_name_txn(txn, event)
        elif event.type == EventTypes.Topic:
            self._store_room_topic_txn(txn, event)
        elif event.type == EventTypes.Redaction:
            self._store_redaction(txn, event)

        outlier = event.internal_metadata.is_outlier()

        event_dict = {
            k: v
            for k, v in event.get_dict().items() if k not in [
                "redacted",
                "redacted_because",
            ]
        }

        metadata_json = encode_canonical_json(
            event.internal_metadata.get_dict())

        self._simple_insert_txn(
            txn,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json.decode("UTF-8"),
                "json": encode_canonical_json(event_dict).decode("UTF-8"),
            },
            or_replace=True,
        )

        vals = {
            "topological_ordering": event.depth,
            "event_id": event.event_id,
            "type": event.type,
            "room_id": event.room_id,
            "content": json.dumps(event.get_dict()["content"]),
            "processed": True,
            "outlier": outlier,
            "depth": event.depth,
        }

        if stream_ordering is not None:
            vals["stream_ordering"] = stream_ordering

        unrec = {
            k: v
            for k, v in event.get_dict().items()
            if k not in vals.keys() and k not in [
                "redacted",
                "redacted_because",
                "signatures",
                "hashes",
                "prev_events",
            ]
        }
        vals["unrecognized_keys"] = json.dumps(unrec)

        try:
            self._simple_insert_txn(
                txn,
                "events",
                vals,
                or_replace=(not outlier),
                or_ignore=bool(outlier),
            )
        except Exception:
            logger.warn(
                "Failed to persist, probably duplicate: %s",
                event.event_id,
                exc_info=True,
            )
            raise _RollbackButIsFineException("_persist_event")

        self._handle_prev_events(
            txn,
            outlier=outlier,
            event_id=event.event_id,
            prev_events=event.prev_events,
            room_id=event.room_id,
        )

        if not outlier:
            self._store_state_groups_txn(txn, event, context)

        if current_state:
            txn.execute("DELETE FROM current_state_events WHERE room_id = ?",
                        (event.room_id, ))

            for s in current_state:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {
                        "event_id": s.event_id,
                        "room_id": s.room_id,
                        "type": s.type,
                        "state_key": s.state_key,
                    },
                    or_replace=True,
                )

        is_state = hasattr(event, "state_key") and event.state_key is not None
        if is_state:
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            self._simple_insert_txn(
                txn,
                "state_events",
                vals,
                or_replace=True,
            )

            if is_new_state:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {
                        "event_id": event.event_id,
                        "room_id": event.room_id,
                        "type": event.type,
                        "state_key": event.state_key,
                    },
                    or_replace=True,
                )

            for e_id, h in event.prev_state:
                self._simple_insert_txn(
                    txn,
                    table="event_edges",
                    values={
                        "event_id": event.event_id,
                        "prev_event_id": e_id,
                        "room_id": event.room_id,
                        "is_state": 1,
                    },
                    or_ignore=True,
                )

            if not backfilled:
                self._simple_insert_txn(
                    txn,
                    table="state_forward_extremities",
                    values={
                        "event_id": event.event_id,
                        "room_id": event.room_id,
                        "type": event.type,
                        "state_key": event.state_key,
                    },
                    or_replace=True,
                )

                for prev_state_id, _ in event.prev_state:
                    self._simple_delete_txn(txn,
                                            table="state_forward_extremities",
                                            keyvalues={
                                                "event_id": prev_state_id,
                                            })

        for hash_alg, hash_base64 in event.hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_event_content_hash_txn(
                txn,
                event.event_id,
                hash_alg,
                hash_bytes,
            )

        for prev_event_id, prev_hashes in event.prev_events:
            for alg, hash_base64 in prev_hashes.items():
                hash_bytes = decode_base64(hash_base64)
                self._store_prev_event_hash_txn(txn, event.event_id,
                                                prev_event_id, alg, hash_bytes)

        for auth_id, _ in event.auth_events:
            self._simple_insert_txn(
                txn,
                table="event_auth",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "auth_id": auth_id,
                },
                or_ignore=True,
            )

        (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
        self._store_event_reference_hash_txn(txn, event.event_id, ref_alg,
                                             ref_hash_bytes)

        if not outlier:
            self._update_min_depth_for_room_txn(txn, event.room_id,
                                                event.depth)
Example #21
    def read_signing_key(self, signing_key_path):
        signing_key_base64 = self.read_file(signing_key_path, "signing_key")
        signing_key_bytes = decode_base64(signing_key_base64)
        return nacl.signing.SigningKey(signing_key_bytes)
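A sketch of producing a file that read_signing_key above could consume: a single line of unpadded base64 holding the ed25519 seed (the file name is illustrative):

import nacl.signing
from unpaddedbase64 import encode_base64

seed_base64 = encode_base64(nacl.signing.SigningKey.generate().encode())
with open("signing.key", "w") as f:
    f.write(seed_base64)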
Example #22
    def process_v2_response(self, server_name, from_server, response_json,
                            requested_id=None):
        time_now_ms = self.clock.time_msec()
        response_keys = {}
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.time_added = time_now_ms
                verify_keys[key_id] = verify_key

        old_verify_keys = {}
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.expired = key_data["expired_ts"]
                verify_key.time_added = time_now_ms
                old_verify_keys[key_id] = verify_key

        for key_id in response_json["signatures"].get(server_name, {}):
            if key_id not in response_json["verify_keys"]:
                raise ValueError(
                    "Key response must include verification keys for all"
                    " signatures"
                )
            if key_id in verify_keys:
                verify_signed_json(
                    response_json,
                    server_name,
                    verify_keys[key_id]
                )

        signed_key_json = sign_json(
            response_json,
            self.config.server_name,
            self.config.signing_key[0],
        )

        signed_key_json_bytes = encode_canonical_json(signed_key_json)
        ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

        updated_key_ids = set()
        if requested_id is not None:
            updated_key_ids.add(requested_id)
        updated_key_ids.update(verify_keys)
        updated_key_ids.update(old_verify_keys)

        response_keys.update(verify_keys)
        response_keys.update(old_verify_keys)

        for key_id in updated_key_ids:
            yield self.store.store_server_keys_json(
                server_name=server_name,
                key_id=key_id,
                from_server=server_name,
                ts_now_ms=time_now_ms,
                ts_expires_ms=ts_valid_until_ms,
                key_json_bytes=signed_key_json_bytes,
            )

        defer.returnValue(response_keys)
Example #25
def select_pdus(cursor):
    cursor.execute(
        "SELECT pdu_id, origin FROM pdus ORDER BY depth ASC"
    )

    ids = cursor.fetchall()

    pdu_tuples = store._get_pdu_tuples(cursor, ids)

    pdus = [Pdu.from_pdu_tuple(p) for p in pdu_tuples]

    reference_hashes = {}

    for pdu in pdus:
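        # PDUs arrive in ascending-depth order, so each prev_pdu's
        # reference hash has already been stored in reference_hashes by
        # the time a dependent PDU is processed.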
        try:
            if pdu.prev_pdus:
                print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
                for pdu_id, origin, hashes in pdu.prev_pdus:
                    ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
                    hashes[ref_alg] = encode_base64(ref_hsh)
                    store._store_prev_pdu_hash_txn(
                        cursor, pdu.pdu_id, pdu.origin, pdu_id, origin,
                        ref_alg, ref_hsh,
                    )
                print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
            pdu = add_event_pdu_content_hash(pdu)
            ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
            reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
            store._store_pdu_reference_hash_txn(cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh)

            for alg, hsh_base64 in pdu.hashes.items():
                print alg, hsh_base64
                store._store_pdu_content_hash_txn(cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64))

        except Exception:
            print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus
Example #26
    def _persist_event_txn(self,
                           txn,
                           event,
                           context,
                           backfilled,
                           stream_ordering=None,
                           is_new_state=True,
                           current_state=None):

        # Remove any existing cache entries for the event_id
        self._get_event_cache.pop(event.event_id)

        # We purposefully do this first since if we include a `current_state`
        # key, we *want* to update the `current_state_events` table
        if current_state:
            txn.execute("DELETE FROM current_state_events WHERE room_id = ?",
                        (event.room_id, ))

            for s in current_state:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {
                        "event_id": s.event_id,
                        "room_id": s.room_id,
                        "type": s.type,
                        "state_key": s.state_key,
                    },
                    or_replace=True,
                )

        if event.is_state() and is_new_state:
            if not backfilled and not context.rejected:
                self._simple_insert_txn(
                    txn,
                    table="state_forward_extremities",
                    values={
                        "event_id": event.event_id,
                        "room_id": event.room_id,
                        "type": event.type,
                        "state_key": event.state_key,
                    },
                    or_replace=True,
                )

                for prev_state_id, _ in event.prev_state:
                    self._simple_delete_txn(txn,
                                            table="state_forward_extremities",
                                            keyvalues={
                                                "event_id": prev_state_id,
                                            })

        outlier = event.internal_metadata.is_outlier()

        if not outlier:
            self._store_state_groups_txn(txn, event, context)

            self._update_min_depth_for_room_txn(txn, event.room_id,
                                                event.depth)

        self._handle_prev_events(
            txn,
            outlier=outlier,
            event_id=event.event_id,
            prev_events=event.prev_events,
            room_id=event.room_id,
        )

        have_persisted = self._simple_select_one_onecol_txn(
            txn,
            table="event_json",
            keyvalues={"event_id": event.event_id},
            retcol="event_id",
            allow_none=True,
        )

        metadata_json = encode_canonical_json(
            event.internal_metadata.get_dict())

        # If we have already persisted this event, we don't need to do any
        # more processing.
        # The processing above must be done on every call to persist event,
        # since they might not have happened on previous calls. For example,
        # if we are persisting an event that we had persisted as an outlier,
        # but is no longer one.
        if have_persisted:
            if not outlier:
                sql = ("UPDATE event_json SET internal_metadata = ?"
                       " WHERE event_id = ?")
                txn.execute(sql, (
                    metadata_json.decode("UTF-8"),
                    event.event_id,
                ))

                sql = ("UPDATE events SET outlier = 0" " WHERE event_id = ?")
                txn.execute(sql, (event.event_id, ))
            return

        if event.type == EventTypes.Member:
            self._store_room_member_txn(txn, event)
        elif event.type == EventTypes.Feedback:
            self._store_feedback_txn(txn, event)
        elif event.type == EventTypes.Name:
            self._store_room_name_txn(txn, event)
        elif event.type == EventTypes.Topic:
            self._store_room_topic_txn(txn, event)
        elif event.type == EventTypes.Redaction:
            self._store_redaction(txn, event)

        event_dict = {
            k: v
            for k, v in event.get_dict().items() if k not in [
                "redacted",
                "redacted_because",
            ]
        }

        self._simple_insert_txn(
            txn,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json.decode("UTF-8"),
                "json": encode_canonical_json(event_dict).decode("UTF-8"),
            },
            or_replace=True,
        )

        content = encode_canonical_json(event.content).decode("UTF-8")

        vals = {
            "topological_ordering": event.depth,
            "event_id": event.event_id,
            "type": event.type,
            "room_id": event.room_id,
            "content": content,
            "processed": True,
            "outlier": outlier,
            "depth": event.depth,
        }

        if stream_ordering is not None:
            vals["stream_ordering"] = stream_ordering

        unrec = {
            k: v
            for k, v in event.get_dict().items()
            if k not in vals.keys() and k not in [
                "redacted",
                "redacted_because",
                "signatures",
                "hashes",
                "prev_events",
            ]
        }

        vals["unrecognized_keys"] = encode_canonical_json(unrec).decode(
            "UTF-8")

        try:
            self._simple_insert_txn(
                txn,
                "events",
                vals,
                or_replace=(not outlier),
                or_ignore=bool(outlier),
            )
        except Exception:
            logger.warn(
                "Failed to persist, probably duplicate: %s",
                event.event_id,
                exc_info=True,
            )
            raise _RollbackButIsFineException("_persist_event")

        if context.rejected:
            self._store_rejections_txn(txn, event.event_id, context.rejected)

        if event.is_state():
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            # TODO: How does this work with backfilling?
            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            self._simple_insert_txn(
                txn,
                "state_events",
                vals,
                or_replace=True,
            )

            if is_new_state and not context.rejected:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {
                        "event_id": event.event_id,
                        "room_id": event.room_id,
                        "type": event.type,
                        "state_key": event.state_key,
                    },
                    or_replace=True,
                )

            for e_id, h in event.prev_state:
                self._simple_insert_txn(
                    txn,
                    table="event_edges",
                    values={
                        "event_id": event.event_id,
                        "prev_event_id": e_id,
                        "room_id": event.room_id,
                        "is_state": 1,
                    },
                    or_ignore=True,
                )

        for hash_alg, hash_base64 in event.hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_event_content_hash_txn(
                txn,
                event.event_id,
                hash_alg,
                hash_bytes,
            )

        for prev_event_id, prev_hashes in event.prev_events:
            for alg, hash_base64 in prev_hashes.items():
                hash_bytes = decode_base64(hash_base64)
                self._store_prev_event_hash_txn(txn, event.event_id,
                                                prev_event_id, alg, hash_bytes)

        for auth_id, _ in event.auth_events:
            self._simple_insert_txn(
                txn,
                table="event_auth",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "auth_id": auth_id,
                },
                or_ignore=True,
            )

        (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
        self._store_event_reference_hash_txn(txn, event.event_id, ref_alg,
                                             ref_hash_bytes)
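
A hedged aside: this example leans on a _simple_insert_txn helper whose behaviour can only be inferred from its call sites here. Below is a minimal sketch of what it plausibly expands to under SQLite; the real Synapse helper differs in detail, and this standalone function is a stand-in, not the actual implementation:

def _simple_insert_txn(txn, table, values, or_replace=False, or_ignore=False):
    # Build "INSERT [OR REPLACE | OR IGNORE] INTO <table> (...) VALUES (...)".
    verb = "INSERT"
    if or_replace:
        verb = "INSERT OR REPLACE"
    elif or_ignore:
        verb = "INSERT OR IGNORE"
    keys, vals = zip(*values.items())
    sql = "%s INTO %s (%s) VALUES (%s)" % (
        verb, table, ", ".join(keys), ", ".join("?" for _ in keys),
    )
    txn.execute(sql, vals)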
Example #27
    def _persist_event_txn(
        self, txn, event, context, backfilled, stream_ordering=None, is_new_state=True, current_state=None
    ):

        # Remove any existing cache entries for the event_id
        txn.call_after(self._invalidate_get_event_cache, event.event_id)

        # We purposefully do this first since if we include a `current_state`
        # key, we *want* to update the `current_state_events` table
        if current_state:
            txn.call_after(self.get_current_state_for_key.invalidate_all)
            txn.call_after(self.get_rooms_for_user.invalidate_all)
            txn.call_after(self.get_users_in_room.invalidate, event.room_id)
            txn.call_after(self.get_joined_hosts_for_room.invalidate, event.room_id)
            txn.call_after(self.get_room_name_and_aliases.invalidate, event.room_id)

            self._simple_delete_txn(txn, table="current_state_events", keyvalues={"room_id": event.room_id})

            for s in current_state:
                self._simple_insert_txn(
                    txn,
                    "current_state_events",
                    {"event_id": s.event_id, "room_id": s.room_id, "type": s.type, "state_key": s.state_key},
                )

        outlier = event.internal_metadata.is_outlier()

        if not outlier:
            self._update_min_depth_for_room_txn(txn, event.room_id, event.depth)

        have_persisted = self._simple_select_one_txn(
            txn,
            table="events",
            keyvalues={"event_id": event.event_id},
            retcols=["event_id", "outlier"],
            allow_none=True,
        )

        metadata_json = encode_json(event.internal_metadata.get_dict(), using_frozen_dicts=USE_FROZEN_DICTS).decode(
            "UTF-8"
        )

        # If we have already persisted this event, we don't need to do any
        # more processing.
        # The processing above must be done on every call to persist event,
        # since they might not have happened on previous calls. For example,
        # if we are persisting an event that we had persisted as an outlier,
        # but is no longer one.
        if have_persisted:
            if not outlier and have_persisted["outlier"]:
                self._store_state_groups_txn(txn, event, context)

                sql = "UPDATE event_json SET internal_metadata = ?" " WHERE event_id = ?"
                txn.execute(sql, (metadata_json, event.event_id))

                sql = "UPDATE events SET outlier = ?" " WHERE event_id = ?"
                txn.execute(sql, (False, event.event_id))
            return

        if not outlier:
            self._store_state_groups_txn(txn, event, context)

        self._handle_prev_events(
            txn, outlier=outlier, event_id=event.event_id, prev_events=event.prev_events, room_id=event.room_id
        )

        if event.type == EventTypes.Member:
            self._store_room_member_txn(txn, event)
        elif event.type == EventTypes.Name:
            self._store_room_name_txn(txn, event)
        elif event.type == EventTypes.Topic:
            self._store_room_topic_txn(txn, event)
        elif event.type == EventTypes.Redaction:
            self._store_redaction(txn, event)

        event_dict = {k: v for k, v in event.get_dict().items() if k not in ["redacted", "redacted_because"]}

        self._simple_insert_txn(
            txn,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json,
                "json": encode_json(event_dict, using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8"),
            },
        )

        content = encode_json(event.content, using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8")

        vals = {
            "topological_ordering": event.depth,
            "event_id": event.event_id,
            "type": event.type,
            "room_id": event.room_id,
            "content": content,
            "processed": True,
            "outlier": outlier,
            "depth": event.depth,
        }

        unrec = {
            k: v
            for k, v in event.get_dict().items()
            if k not in vals.keys() and k not in ["redacted", "redacted_because", "signatures", "hashes", "prev_events"]
        }

        vals["unrecognized_keys"] = encode_json(unrec, using_frozen_dicts=USE_FROZEN_DICTS).decode("UTF-8")

        sql = (
            "INSERT INTO events"
            " (stream_ordering, topological_ordering, event_id, type,"
            " room_id, content, processed, outlier, depth)"
            " VALUES (?,?,?,?,?,?,?,?,?)"
        )

        txn.execute(
            sql,
            (
                stream_ordering,
                event.depth,
                event.event_id,
                event.type,
                event.room_id,
                content,
                True,
                outlier,
                event.depth,
            ),
        )

        if context.rejected:
            self._store_rejections_txn(txn, event.event_id, context.rejected)

        for hash_alg, hash_base64 in event.hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_event_content_hash_txn(txn, event.event_id, hash_alg, hash_bytes)

        for prev_event_id, prev_hashes in event.prev_events:
            for alg, hash_base64 in prev_hashes.items():
                hash_bytes = decode_base64(hash_base64)
                self._store_prev_event_hash_txn(txn, event.event_id, prev_event_id, alg, hash_bytes)

        self._simple_insert_many_txn(
            txn,
            table="event_auth",
            values=[
                {"event_id": event.event_id, "room_id": event.room_id, "auth_id": auth_id}
                for auth_id, _ in event.auth_events
            ],
        )

        (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
        self._store_event_reference_hash_txn(txn, event.event_id, ref_alg, ref_hash_bytes)

        if event.is_state():
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            # TODO: How does this work with backfilling?
            if hasattr(event, "replaces_state"):
                vals["prev_state"] = event.replaces_state

            self._simple_insert_txn(txn, "state_events", vals)

            self._simple_insert_many_txn(
                txn,
                table="event_edges",
                values=[
                    {"event_id": event.event_id, "prev_event_id": e_id, "room_id": event.room_id, "is_state": True}
                    for e_id, h in event.prev_state
                ],
            )

            if is_new_state and not context.rejected:
                txn.call_after(self.get_current_state_for_key.invalidate, event.room_id, event.type, event.state_key)

                if event.type == EventTypes.Name or event.type == EventTypes.Aliases:
                    txn.call_after(self.get_room_name_and_aliases.invalidate, event.room_id)

                self._simple_upsert_txn(
                    txn,
                    "current_state_events",
                    keyvalues={"room_id": event.room_id, "type": event.type, "state_key": event.state_key},
                    values={"event_id": event.event_id},
                )

        return
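
A hedged aside on the txn.call_after pattern used above: cache invalidations are queued on the transaction and, under the assumed semantics sketched below, run only once the transaction commits, so a rollback never touches the caches. The class and runner here are stand-ins, not the real Synapse transaction wrapper:

class AfterCommitTransaction(object):
    """Stand-in transaction wrapper that defers callbacks to commit time."""

    def __init__(self, cursor):
        self.cursor = cursor
        self.after_callbacks = []

    def call_after(self, callback, *args):
        # Queue cache invalidations; run them only if the txn commits.
        self.after_callbacks.append((callback, args))

    def execute(self, sql, params=()):
        return self.cursor.execute(sql, params)


def run_interaction(conn, func, *args):
    txn = AfterCommitTransaction(conn.cursor())
    try:
        result = func(txn, *args)
        conn.commit()
    except Exception:
        conn.rollback()
        raise
    for callback, cb_args in txn.after_callbacks:
        callback(*cb_args)
    return result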
Example #28
    def process_v2_response(self,
                            server_name,
                            from_server,
                            response_json,
                            requested_id=None):
        time_now_ms = self.clock.time_msec()
        response_keys = {}
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.time_added = time_now_ms
                verify_keys[key_id] = verify_key

        old_verify_keys = {}
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.expired = key_data["expired_ts"]
                verify_key.time_added = time_now_ms
                old_verify_keys[key_id] = verify_key

        for key_id in response_json["signatures"][server_name]:
            if key_id not in response_json["verify_keys"]:
                raise ValueError(
                    "Key response must include verification keys for all"
                    " signatures")
            if key_id in verify_keys:
                verify_signed_json(response_json, server_name,
                                   verify_keys[key_id])

        signed_key_json = sign_json(
            response_json,
            self.config.server_name,
            self.config.signing_key[0],
        )

        signed_key_json_bytes = encode_canonical_json(signed_key_json)
        ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

        updated_key_ids = set()
        if requested_id is not None:
            updated_key_ids.add(requested_id)
        updated_key_ids.update(verify_keys)
        updated_key_ids.update(old_verify_keys)

        response_keys.update(verify_keys)
        response_keys.update(old_verify_keys)

        for key_id in updated_key_ids:
            yield self.store.store_server_keys_json(
                server_name=server_name,
                key_id=key_id,
                from_server=server_name,
                ts_now_ms=time_now_ms,
                ts_expires_ms=ts_valid_until_ms,
                key_json_bytes=signed_key_json_bytes,
            )

        defer.returnValue(response_keys)
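
The time_added, expired, and valid_until_ts fields attached above presumably feed a later freshness check. A hypothetical sketch of such a check (key_usable_at is not a function from these sources):

def key_usable_at(verify_key, ts_ms, ts_valid_until_ms):
    # Hypothetical: an old key is usable only before its expired_ts; a
    # current key only while the signed response remains valid.
    expired_ts = getattr(verify_key, "expired", None)
    if expired_ts is not None:
        return ts_ms < expired_ts
    return ts_ms < ts_valid_until_ms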
Example #29
def read_signing_key(signing_key_path):
    with open(signing_key_path) as signing_key_file:
        signing_key_b64 = signing_key_file.read()
        signing_key_bytes = decode_base64(signing_key_b64)
        return nacl.signing.SigningKey(signing_key_bytes)