Exemple #1
0
    async def _decrypt_media(
        self,
        data: AsyncIterator[bytes],
        info: EncryptedMediaInfo,
    ) -> AsyncIterator[bytes]:
        """Decrypt an AES-CTR encrypted media stream and verify its SHA-256.

        Decrypted chunks are yielded as they arrive. The hash can only be
        checked once the whole stream has been consumed, so a
        MediaSHA256Mismatch is raised *after* all chunks have been yielded —
        callers must not trust the data until iteration completes.
        """

        try:
            wanted_sha256 = decode_base64(info.sha256)
            aes256_key = decode_base64(info.key)
            init_vector = decode_base64(info.init_vector)
        except BinAsciiError as e:
            # Chain the original error so the base64 failure isn't lost.
            raise err.MediaInvalidBase64(next(iter(e.args), "")) from e

        sha256 = SHA256.new()
        # The 16 byte IV is an 8 byte prefix followed by a big-endian
        # 8 byte initial counter value (usually, but not always, 0).
        prefix = init_vector[:8]
        init_value = int.from_bytes(init_vector[8:], "big")
        counter = Counter.new(64, prefix=prefix, initial_value=init_value)

        try:
            cipher = AES.new(aes256_key, AES.MODE_CTR, counter=counter)
        except ValueError as e:
            raise err.MediaAESError(next(iter(e.args), "")) from e

        async for chunk in data:
            sha256.update(chunk)
            yield cipher.decrypt(chunk)

        got_sha256 = sha256.digest()

        if wanted_sha256 != got_sha256:
            raise err.MediaSHA256Mismatch(wanted_sha256, got_sha256)
def decrypt_attachment(ciphertext, info):
    """Decrypt an encrypted attachment.

    Args:
        ciphertext (bytes): The data to decrypt.
        info (dict): The information needed to decrypt the attachment.

            | key: AES-CTR JWK key object.
            | iv: Base64 encoded 16 byte AES-CTR IV.
            | hashes.sha256: Base64 encoded SHA-256 hash of the ciphertext.

    Returns:
        The plaintext bytes.

    Raises:
        RuntimeError if the integrity check fails.
    """
    expected_hash = unpaddedbase64.decode_base64(info['hashes']['sha256'])
    h = SHA256.new()
    h.update(ciphertext)
    if h.digest() != expected_hash:
        raise RuntimeError('Mismatched SHA-256 digest.')

    key = unpaddedbase64.decode_base64(info['key']['k'])
    iv_bytes = unpaddedbase64.decode_base64(info['iv'])
    # The IV is an 8 byte prefix followed by a big-endian 8 byte initial
    # counter value. The spec says the counter starts at 0, but some
    # clients use a non-zero counter, so decode it instead of assuming 0.
    prefix = iv_bytes[:8]
    initial_value = int.from_bytes(iv_bytes[8:], 'big')
    ctr = Counter.new(64, prefix=prefix, initial_value=initial_value)
    cipher = AES.new(key, AES.MODE_CTR, counter=ctr)

    return cipher.decrypt(ciphertext)
Exemple #3
0
 def from_token(cls, token):
     """Deserialise a pagination token back into a RoomListNextBatch."""
     raw = decode_base64(token)
     if PY3:
         # raw=False is only available on newer msgpack versions and is
         # only really needed on Python 3; gate it so Debian-packaged
         # msgpack versions keep working.
         decoded = msgpack.loads(raw, raw=False)
     else:
         decoded = msgpack.loads(raw)
     kwargs = {cls.REVERSE_KEY_DICT[key]: val for key, val in decoded.items()}
     return RoomListNextBatch(**kwargs)
Exemple #4
0
 def from_token(cls, token):
     """Decode a base64/msgpack token into a RoomListNextBatch."""
     payload = decode_base64(token)
     # The raw=False argument is only available on new versions of
     # msgpack and only really needed on Python 3; gate it to avoid
     # breaking Debian-packaged versions.
     if PY3:
         decoded = msgpack.loads(payload, raw=False)
     else:
         decoded = msgpack.loads(payload)
     return RoomListNextBatch(
         **{cls.REVERSE_KEY_DICT[k]: v for k, v in decoded.items()}
     )
def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
    """Return True iff the content hash on this PDU matches its contents."""
    algo_name, wanted_hash = compute_content_hash(
        event.get_pdu_json(), hash_algorithm
    )
    logger.debug("Expecting hash: %s", encode_base64(wanted_hash))

    # Some malformed events lack a 'hashes'. Protect against it being
    # missing or a weird type by treating it the same as an unhashed event.
    hashes = event.get("hashes")
    if not isinstance(hashes, dict):
        raise SynapseError(400, "Malformed 'hashes'", Codes.UNAUTHORIZED)

    if algo_name not in hashes:
        raise SynapseError(
            400,
            "Algorithm %s not in hashes %s" % (algo_name, list(hashes)),
            Codes.UNAUTHORIZED,
        )

    encoded_hash = hashes[algo_name]
    try:
        actual_hash = decode_base64(encoded_hash)
    except Exception:
        raise SynapseError(
            400,
            "Invalid base64: %s" % (encoded_hash,),
            Codes.UNAUTHORIZED,
        )

    return actual_hash == wanted_hash
Exemple #6
0
def check_event_content_hash(event: EventBase,
                             hash_algorithm: Hasher = hashlib.sha256) -> bool:
    """Return True iff the content hash on this PDU matches its contents."""
    name, expected_hash = compute_content_hash(
        event.get_pdu_json(), hash_algorithm
    )
    logger.debug(
        "Verifying content hash on %s (expecting: %s)",
        event.event_id,
        encode_base64(expected_hash),
    )

    # Some malformed events lack a 'hashes'. Protect against it being
    # missing or a weird type by treating it the same as an unhashed event.
    # NB: it might be a frozendict or a dict.
    hashes = event.get("hashes")
    if not isinstance(hashes, collections.abc.Mapping):
        raise SynapseError(
            400, "Malformed 'hashes': %s" % (type(hashes),), Codes.UNAUTHORIZED
        )

    if name not in hashes:
        raise SynapseError(
            400,
            "Algorithm %s not in hashes %s" % (name, list(hashes)),
            Codes.UNAUTHORIZED,
        )

    encoded = hashes[name]
    try:
        actual_hash = decode_base64(encoded)
    except Exception:
        raise SynapseError(
            400, "Invalid base64: %s" % (encoded,), Codes.UNAUTHORIZED
        )

    return actual_hash == expected_hash
Exemple #7
0
def _parse_key_servers(key_servers, federation_verify_certificates):
    """Parse and validate the 'trusted_key_servers' config section.

    Args:
        key_servers (list): The raw config value.
        federation_verify_certificates (bool): Whether federation TLS
            certificates are verified; when False, servers must pin verify
            keys unless they opt into "accept_keys_insecurely".

    Yields:
        TrustedKeyServer: one per configured server.

    Raises:
        ConfigError: if the config is malformed or a key cannot be parsed.
    """
    try:
        jsonschema.validate(key_servers, TRUSTED_KEY_SERVERS_SCHEMA)
    except jsonschema.ValidationError as e:
        # Chain the schema error so the underlying cause is preserved.
        raise ConfigError("Unable to parse 'trusted_key_servers': " +
                          e.message) from e

    for server in key_servers:
        server_name = server["server_name"]
        result = TrustedKeyServer(server_name=server_name)

        verify_keys = server.get("verify_keys")
        if verify_keys is not None:
            result.verify_keys = {}
            for key_id, key_base64 in verify_keys.items():
                if not is_signing_algorithm_supported(key_id):
                    raise ConfigError(
                        "Unsupported signing algorithm on key %s for server %s in "
                        "trusted_key_servers" % (key_id, server_name))
                try:
                    key_bytes = decode_base64(key_base64)
                    verify_key = decode_verify_key_bytes(key_id, key_bytes)
                except Exception as e:
                    raise ConfigError(
                        "Unable to parse key %s for server %s in "
                        "trusted_key_servers: %s" % (key_id, server_name, e)) from e

                result.verify_keys[key_id] = verify_key

        # Without TLS verification a key response could be tampered with in
        # transit, so insist on pinned verify keys unless explicitly opted out.
        if not federation_verify_certificates and not server.get(
                "accept_keys_insecurely"):
            _assert_keyserver_has_verify_keys(result)

        yield result
Exemple #8
0
def decrypt(encrypted_payload: str, passphrase: str):
    """Decrypt a version-1 key-export payload.

    Args:
        encrypted_payload: Base64 encoded export blob.
        passphrase: Passphrase the export was encrypted with.

    Returns:
        The decrypted plaintext bytes.

    Raises:
        ValueError: on an unsupported format version or failed HMAC check.
    """
    from hmac import compare_digest

    decoded_payload = decode_base64(encrypted_payload)

    version = decoded_payload[0]

    # Indexing bytes yields a 1-char str on Python 2; normalise to an int.
    if isinstance(version, str):
        version = ord(version)

    if version != 1:
        raise ValueError("Unsupported export format version.")

    # Layout: 1 byte version | 16 byte salt | 16 byte IV | 4 byte PBKDF2
    # round count | ciphertext | 32 byte HMAC-SHA256 of everything before it.
    salt = decoded_payload[1:17]
    iv = int.from_bytes(decoded_payload[17:33], byteorder="big")
    count = int.from_bytes(decoded_payload[33:37], byteorder="big")
    encrypted_data = decoded_payload[37:-32]
    expected_hmac = decoded_payload[-32:]

    derived_key = PBKDF2(passphrase, salt, 64, count, prf)  # type: ignore
    aes_key = derived_key[:32]
    hmac_key = derived_key[32:64]

    computed_hmac = HMAC.new(hmac_key, decoded_payload[:-32], SHA256).digest()

    # Constant-time comparison: a plain != would leak how many leading
    # bytes of the MAC match via timing.
    if not compare_digest(computed_hmac, expected_hmac):
        raise ValueError("HMAC check failed for encrypted payload.")

    ctr = Counter.new(128, initial_value=iv)
    cipher = AES.new(aes_key, AES.MODE_CTR, counter=ctr)
    return cipher.decrypt(encrypted_data)
Exemple #9
0
    def _verify_third_party_invite(self, event, auth_events):
        """
        Validates that the invite event is authorized by a previous third-party invite.

        Checks that the public key, and keyserver, match those in the third party invite,
        and that the invite event has a signature issued using that public key.

        Args:
            event: The m.room.member join event being validated.
            auth_events: All relevant previous context events which may be used
                for authorization decisions.

        Return:
            True if the event fulfills the expectations of a previous third party
            invite event.
        """
        # Reject events that are not shaped like a third-party invite join.
        if "third_party_invite" not in event.content:
            return False
        if "signed" not in event.content["third_party_invite"]:
            return False
        signed = event.content["third_party_invite"]["signed"]
        for key in {"mxid", "token"}:
            if key not in signed:
                return False

        token = signed["token"]

        # Look up the original m.room.third_party_invite event by its token.
        invite_event = auth_events.get(
            (EventTypes.ThirdPartyInvite, token,)
        )
        if not invite_event:
            return False

        # The join must be sent by the same user who issued the invite event.
        if event.user_id != invite_event.user_id:
            return False
        try:
            public_key = invite_event.content["public_key"]
            # The signed bundle must be bound to this exact member and token.
            if signed["mxid"] != event.state_key:
                return False
            if signed["token"] != token:
                return False
            for server, signature_block in signed["signatures"].items():
                for key_name, encoded_signature in signature_block.items():
                    # NOTE(review): any non-ed25519 key rejects the whole
                    # event rather than being skipped — confirm intended.
                    if not key_name.startswith("ed25519:"):
                        return False
                    verify_key = decode_verify_key_bytes(
                        key_name,
                        decode_base64(public_key)
                    )
                    # Raises SignatureVerifyException on a bad signature.
                    verify_signed_json(signed, server, verify_key)

                    # We got the public key from the invite, so we know that the
                    # correct server signed the signed bundle.
                    # The caller is responsible for checking that the signing
                    # server has not revoked that public key.
                    return True
            return False
        except (KeyError, SignatureVerifyException,):
            return False
Exemple #10
0
    def _verify_third_party_invite(self, event, auth_events):
        """
        Validates that the invite event is authorized by a previous third-party invite.

        Checks that the public key, and keyserver, match those in the third party invite,
        and that the invite event has a signature issued using that public key.

        Args:
            event: The m.room.member join event being validated.
            auth_events: All relevant previous context events which may be used
                for authorization decisions.

        Return:
            True if the event fulfills the expectations of a previous third party
            invite event.
        """
        # Reject events that are not shaped like a third-party invite join.
        if "third_party_invite" not in event.content:
            return False
        if "signed" not in event.content["third_party_invite"]:
            return False
        signed = event.content["third_party_invite"]["signed"]
        for key in {"mxid", "token"}:
            if key not in signed:
                return False

        token = signed["token"]

        # Look up the original m.room.third_party_invite event by its token.
        invite_event = auth_events.get(
            (EventTypes.ThirdPartyInvite, token,)
        )
        if not invite_event:
            return False

        # The join must be sent by the same user who issued the invite event.
        if event.user_id != invite_event.user_id:
            return False
        try:
            public_key = invite_event.content["public_key"]
            # The signed bundle must be bound to this exact member and token.
            if signed["mxid"] != event.state_key:
                return False
            if signed["token"] != token:
                return False
            for server, signature_block in signed["signatures"].items():
                for key_name, encoded_signature in signature_block.items():
                    # NOTE(review): any non-ed25519 key rejects the whole
                    # event rather than being skipped — confirm intended.
                    if not key_name.startswith("ed25519:"):
                        return False
                    verify_key = decode_verify_key_bytes(
                        key_name,
                        decode_base64(public_key)
                    )
                    # Raises SignatureVerifyException on a bad signature.
                    verify_signed_json(signed, server, verify_key)

                    # We got the public key from the invite, so we know that the
                    # correct server signed the signed bundle.
                    # The caller is responsible for checking that the signing
                    # server has not revoked that public key.
                    return True
            return False
        except (KeyError, SignatureVerifyException,):
            return False
Exemple #11
0
def decrypt_attachment(ciphertext: bytes, key: str, hash: str,
                       iv: str) -> bytes:
    """Decrypt an AES-CTR encrypted attachment.

    Args:
        ciphertext: The data to decrypt.
        key: AES_CTR JWK key object.
        hash: Base64 encoded SHA-256 hash of the ciphertext.
        iv: Base64 encoded 16 byte AES-CTR IV.
    Returns:
        The plaintext bytes.
    Raises:
        DecryptionError: if the integrity check or any decoding step fails.
    """
    expected_hash = unpaddedbase64.decode_base64(hash)

    digest = SHA256.new()
    digest.update(ciphertext)

    if digest.digest() != expected_hash:
        raise DecryptionError("Mismatched SHA-256 digest.")

    try:
        decoded_key: bytes = unpaddedbase64.decode_base64(key)
    except (binascii.Error, TypeError):
        raise DecryptionError("Error decoding key.")

    try:
        decoded_iv: bytes = unpaddedbase64.decode_base64(iv)
        nonce = decoded_iv[:8]
        # A non-zero IV counter is not spec-compliant, but some clients
        # still produce one, so decode the counter half as well.
        counter_start = struct.unpack(">Q", decoded_iv[8:])[0]
    except (binascii.Error, TypeError, IndexError, struct.error):
        raise DecryptionError("Error decoding initial values.")

    ctr = Counter.new(64, prefix=nonce, initial_value=counter_start)

    try:
        cipher = AES.new(decoded_key, AES.MODE_CTR, counter=ctr)
    except ValueError as e:
        raise DecryptionError("Failed to create AES cipher") from e

    return cipher.decrypt(ciphertext)
Exemple #12
0
def get_server_keys(server_name, target, port):
    """Fetch, verify and return the signing keys advertised by a server."""
    url = "https://%s:%i/_matrix/key/v1" % (target, port)
    response = json.load(urllib2.urlopen(url))
    verify_keys = {}
    for key_id, key_base64 in response["verify_keys"].items():
        key = decode_verify_key_bytes(key_id, decode_base64(key_base64))
        # The key response must be self-signed by the server it describes.
        verify_signed_json(response, server_name, key)
        verify_keys[key_id] = key
    return verify_keys
Exemple #13
0
def get_server_keys(server_name, target, port):
    """Download a server's /_matrix/key/v1 response and verify its keys."""
    endpoint = "https://%s:%i/_matrix/key/v1" % (target, port)
    key_response = json.load(urllib2.urlopen(endpoint))
    result = {}
    for key_id, encoded_key in key_response["verify_keys"].items():
        verify_key = decode_verify_key_bytes(key_id, decode_base64(encoded_key))
        # Check the response is signed with the very key it advertises.
        verify_signed_json(key_response, server_name, verify_key)
        result[key_id] = verify_key
    return result
Exemple #14
0
def decrypt_attachment(ciphertext: bytes, key: str, hash: str, iv: str) -> bytes:
    """Decrypt an encrypted attachment.

    Args:
        ciphertext (bytes): The data to decrypt.
        key (str): AES_CTR JWK key object.
        hash (str): Base64 encoded SHA-256 hash of the ciphertext.
        iv (str): Base64 encoded 16 byte AES-CTR IV.
    Returns:
        The plaintext bytes.
    Raises:
        EncryptionError: if the integrity check or any decoding step fails.
    """
    expected_hash = unpaddedbase64.decode_base64(hash)

    h = SHA256.new()
    h.update(ciphertext)

    if h.digest() != expected_hash:
        raise EncryptionError("Mismatched SHA-256 digest.")

    try:
        byte_key: bytes = unpaddedbase64.decode_base64(key)
    except (BinAsciiError, TypeError) as e:
        raise EncryptionError("Error decoding key.") from e

    try:
        byte_iv: bytes = unpaddedbase64.decode_base64(iv)
    except (BinAsciiError, TypeError) as e:
        raise EncryptionError("Error decoding initial values.") from e

    # 8 byte nonce prefix followed by a big-endian 8 byte counter value.
    prefix: bytes = byte_iv[:8]
    cnt: int = int.from_bytes(byte_iv[8:], 'big')
    ctr = Counter.new(64, prefix=prefix, initial_value=cnt)

    try:
        cipher = AES.new(byte_key, AES.MODE_CTR, counter=ctr)
    except ValueError as e:
        # Chain so the underlying AES error is preserved in the traceback.
        raise EncryptionError(e) from e

    return cipher.decrypt(ciphertext)
Exemple #15
0
def decrypt_attachment(ciphertext: bytes, key: str, hash: str,
                       iv: str) -> bytes:
    """Decrypt an encrypted attachment.

    Args:
        ciphertext: The data to decrypt.
        key: AES_CTR JWK key object.
        hash: Base64 encoded SHA-256 hash of the ciphertext.
        iv: Base64 encoded 16 byte AES-CTR IV.
    Returns:
        The plaintext bytes.
    Raises:
        EncryptionError: if the integrity check or any decoding step fails.
    """
    expected_hash = unpaddedbase64.decode_base64(hash)

    h = SHA256.new()
    h.update(ciphertext)

    if h.digest() != expected_hash:
        raise DecryptionError("Mismatched SHA-256 digest.")

    try:
        byte_key: bytes = unpaddedbase64.decode_base64(key)
    except (BinAsciiError, TypeError) as e:
        raise DecryptionError("Error decoding key.") from e

    try:
        byte_iv: bytes = unpaddedbase64.decode_base64(iv)
        prefix = byte_iv[:8]
        # The spec says the trailing 8 counter bytes are 0, but some clients
        # use a non-zero starting counter, so decode them instead of
        # assuming zero.
        initial_value = int.from_bytes(byte_iv[8:], "big")
    except (BinAsciiError, TypeError) as e:
        raise DecryptionError("Error decoding initial values.") from e

    ctr = Counter.new(64, prefix=prefix, initial_value=initial_value)

    try:
        cipher = AES.new(byte_key, AES.MODE_CTR, counter=ctr)
    except ValueError as e:
        raise DecryptionError("Failed to create AES cipher") from e

    return cipher.decrypt(ciphertext)
Exemple #16
0
 def read_perspectives(self, perspectives_servers):
     """Build a mapping of server_name -> {key_id: VerifyKey} from config."""
     servers = {}
     for server_name, server_config in perspectives_servers.items():
         for key_id, key_data in server_config["verify_keys"].items():
             # Keys with unsupported algorithms are silently skipped.
             if not is_signing_algorithm_supported(key_id):
                 continue
             verify_key = decode_verify_key_bytes(
                 key_id, decode_base64(key_data["key"])
             )
             servers.setdefault(server_name, {})[key_id] = verify_key
     return servers
Exemple #17
0
def decrypt_attachment(ciphertext, key, hash, iv):
    """Decrypt an encrypted attachment.

    Args:
        ciphertext (bytes): The data to decrypt.
        key (str): AES_CTR JWK key object.
        hash (str): Base64 encoded SHA-256 hash of the ciphertext.
        iv (str): Base64 encoded 16 byte AES-CTR IV.
    Returns:
        The plaintext bytes.
    Raises:
        EncryptionError: if the integrity check or any decoding step fails.
    """
    expected_hash = unpaddedbase64.decode_base64(hash)

    h = SHA256.new()
    h.update(ciphertext)

    if h.digest() != expected_hash:
        raise EncryptionError("Mismatched SHA-256 digest.")

    try:
        key = unpaddedbase64.decode_base64(key)
    except (base64.binascii.Error, TypeError) as e:
        raise EncryptionError("Error decoding key.") from e

    try:
        iv_bytes = unpaddedbase64.decode_base64(iv)
    except (base64.binascii.Error, TypeError) as e:
        raise EncryptionError("Error decoding initial values.") from e

    prefix = iv_bytes[:8]
    # The spec says the trailing 8 counter bytes are zero, but some clients
    # use a non-zero starting counter, so decode it rather than assume 0.
    initial_value = int.from_bytes(iv_bytes[8:], "big")
    ctr = Counter.new(64, prefix=prefix, initial_value=initial_value)

    try:
        cipher = AES.new(key, AES.MODE_CTR, counter=ctr)
    except ValueError as e:
        # Chain so the underlying AES error is preserved in the traceback.
        raise EncryptionError(e) from e

    return cipher.decrypt(ciphertext)
Exemple #18
0
    def verifyServerSignedJson(self,
                               signed_json,
                               acceptable_server_names=None):
        """Given a signed json object, try to verify any one
        of the signatures on it

        XXX: This contains a fairly noddy version of the home server
        SRV lookup and signature verification. It does no caching (just
        fetches the signature each time and does not contact any other
        servers to do perspective checks).

        :param acceptable_server_names: If provided and not None,
        only signatures from servers in this list will be accepted.
        :type acceptable_server_names: list[unicode] or None

        :return a tuple of the server name and key name that was
        successfully verified.
        :rtype: twisted.internet.defer.Deferred[tuple[unicode]]

        :raise SignatureVerifyException: The json cannot be verified.
        """
        if 'signatures' not in signed_json:
            raise SignatureVerifyException("Signature missing")
        for server_name, sigs in signed_json['signatures'].items():
            if acceptable_server_names is not None:
                if server_name not in acceptable_server_names:
                    continue

            server_keys = yield self._getKeysForServer(server_name)
            for key_name, sig in sigs.items():
                if key_name in server_keys:
                    if 'key' not in server_keys[key_name]:
                        # Fix: the format string was previously logged
                        # without its key_name argument.
                        logger.warn("Ignoring key %s with no 'key'", key_name)
                        continue
                    key_bytes = decode_base64(server_keys[key_name]['key'])
                    verify_key = signedjson.key.decode_verify_key_bytes(
                        key_name, key_bytes)
                    logger.info("verifying sig from key %r", key_name)
                    signedjson.sign.verify_signed_json(signed_json,
                                                       server_name, verify_key)
                    logger.info("Verified signature with key %s from %s",
                                key_name, server_name)
                    defer.returnValue((server_name, key_name))
            logger.warn(
                "No matching key found for signature block %r in server keys %r",
                signed_json['signatures'],
                server_keys,
            )
        logger.warn(
            "Unable to verify any signatures from block %r. Acceptable server names: %r",
            signed_json['signatures'],
            acceptable_server_names,
        )
        raise SignatureVerifyException("No matching signature found")
def decode_verify_key_base64(algorithm, version, key_base64):
    """Decode a base64 encoded verify key
    Args:
        algorithm (str): The algorithm the key is for (currently "ed25519").
        version (str): Identifies this key out of the keys for this entity.
        key_base64 (str): Base64 encoded bytes of the key.
    Returns:
        A VerifyKey object.
    """
    # Key ids have the form "<algorithm>:<version>".
    return decode_verify_key_bytes(
        "%s:%s" % (algorithm, version), decode_base64(key_base64)
    )
 def test_decode(self):
     """decode_base64 handles unpadded input and rejects invalid lengths."""
     cases = [
         (u'', b''),
         (u'AA', b'\x00'),
         (u'AAA', b'\x00\x00'),
         (u'AAAA', b'\x00\x00\x00'),
     ]
     for encoded, expected in cases:
         self.assertEqual(decode_base64(encoded), expected)
     # A single base64 character cannot encode a whole byte.
     with self.assertRaises(Exception):
         decode_base64(u'A')
Exemple #21
0
 def test_decode(self) -> None:
     """Unpadded base64 decoding works for every valid input length."""
     for encoded, expected in (
         ("", b""),
         ("AA", b"\x00"),
         ("AAA", b"\x00\x00"),
         ("AAAA", b"\x00\x00\x00"),
     ):
         self.assertEqual(decode_base64(encoded), expected)
     # A lone base64 character is not decodable.
     with self.assertRaises(Exception):
         decode_base64("A")
 def test_decode(self):
     """decode_base64 accepts unpadded input of every valid length."""
     expectations = {
         u'': b'',
         u'AA': b'\x00',
         u'AAA': b'\x00\x00',
         u'AAAA': b'\x00\x00\x00',
     }
     for encoded, decoded in expectations.items():
         self.assertEqual(decode_base64(encoded), decoded)
     # One base64 character can never encode a whole byte.
     with self.assertRaises(Exception):
         decode_base64(u'A')
Exemple #23
0
    async def verifyServerSignedJson(
        self,
        signed_json: SignedMatrixRequest,
        acceptable_server_names: Optional[List[str]] = None,
    ) -> Tuple[str, str]:
        """Given a signed json object, try to verify any one
        of the signatures on it

        XXX: This contains a fairly noddy version of the home server
        SRV lookup and signature verification. It does no caching (just
        fetches the signature each time and does not contact any other
        servers to do perspective checks).

        :param acceptable_server_names: If provided and not None,
        only signatures from servers in this list will be accepted.

        :return a tuple of the server name and key name that was
        successfully verified.

        :raise SignatureVerifyException: The json cannot be verified.
        """
        for server_name, sigs in signed_json.signatures.items():
            if acceptable_server_names is not None:
                if server_name not in acceptable_server_names:
                    continue

            # Fetch the server's published verify keys (no caching).
            server_keys = await self._getKeysForServer(server_name)
            for key_name, sig in sigs.items():
                if key_name in server_keys:

                    key_bytes = decode_base64(server_keys[key_name]["key"])
                    verify_key = signedjson.key.decode_verify_key_bytes(
                        key_name, key_bytes)
                    logger.info("verifying sig from key %r", key_name)
                    # Re-serialise the request object; verify_signed_json
                    # raises on a bad signature.
                    payload = attr.asdict(signed_json)
                    signedjson.sign.verify_signed_json(payload, server_name,
                                                       verify_key)
                    logger.info("Verified signature with key %s from %s",
                                key_name, server_name)
                    # First successful verification wins.
                    return (server_name, key_name)
            logger.warning(
                "No matching key found for signature block %r in server keys %r",
                signed_json.signatures,
                server_keys,
            )
        logger.warning(
            "Unable to verify any signatures from block %r. Acceptable server names: %r",
            signed_json.signatures,
            acceptable_server_names,
        )
        raise SignatureVerifyException("No matching signature found")
Exemple #24
0
 def read_old_signing_keys(self, old_signing_keys):
     """Parse expired signing keys from config into VerifyKey objects."""
     keys = {}
     for key_id, key_data in old_signing_keys.items():
         # Fail fast on algorithms we cannot verify.
         if not is_signing_algorithm_supported(key_id):
             raise ConfigError(
                 "Unsupported signing algorithm for old key: %r" % (key_id,)
             )
         verify_key = decode_verify_key_bytes(
             key_id, decode_base64(key_data["key"])
         )
         # Record when this key stopped being valid.
         verify_key.expired_ts = key_data["expired_ts"]
         keys[key_id] = verify_key
     return keys
Exemple #25
0
    def __init__(self, sydent, server_name, port, pubkeys):
        # Set up replication to a remote peer: resolve its replication URL
        # and decode its signing public key into a verify key.
        super(RemotePeer, self).__init__(server_name, pubkeys)
        self.sydent = sydent

        # look up or build the replication URL
        try:
            replication_url = sydent.cfg.get(
                "peer.%s" % server_name,
                "base_replication_url",
            )
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            # No explicit URL configured: fall back to https://host:port.
            if not port:
                port = 1001
            replication_url = "https://%s:%i" % (server_name, port)

        if replication_url[-1:] != '/':
            replication_url += "/"

        replication_url += "_matrix/identity/replicate/v1/push"
        self.replication_url = replication_url

        # Get verify key for this peer

        # Check if their key is base64 or hex encoded
        pubkey = self.pubkeys[SIGNING_KEY_ALGORITHM]
        try:
            # Check for hex encoding
            # NOTE(review): a base64 key made up entirely of hex characters
            # would be misdetected as hex here — confirm config guarantees
            # make this unambiguous.
            int(pubkey, 16)

            # Decode hex into bytes
            pubkey_decoded = binascii.unhexlify(pubkey)

            logger.warn(
                "Peer public key of %s is hex encoded. Please update to base64 encoding",
                server_name)
        except ValueError:
            # Check for base64 encoding
            try:
                pubkey_decoded = decode_base64(pubkey)
            except Exception as e:
                raise ConfigError(
                    "Unable to decode public key for peer %s: %s" %
                    (server_name, e), )

        # NOTE(review): the key id passed here has an empty version part
        # ("ed25519:") — presumably decode_verify_key_bytes only inspects
        # the algorithm prefix; verify against signedjson.
        self.verify_key = signedjson.key.decode_verify_key_bytes(
            SIGNING_KEY_ALGORITHM + ":", pubkey_decoded)

        # Attach metadata
        self.verify_key.alg = SIGNING_KEY_ALGORITHM
        self.verify_key.version = 0
Exemple #26
0
def verify_signed_json(json_object, signature_name, verify_key):
    # type: (JsonDict, str, VerifyKey) -> None
    """Check a signature on a signed JSON object.

    Args:
        json_object: The signed JSON object to check.
        signature_name: The name of the signature to check.
        verify_key: The key to verify the signature.

    Raises:
        SignatureVerifyException: If the signature isn't valid
    """

    try:
        signatures = json_object["signatures"]
    except KeyError as e:
        raise SignatureVerifyException("No signatures on this object") from e

    key_id = "%s:%s" % (verify_key.alg, verify_key.version)

    try:
        signature_b64 = signatures[signature_name][key_id]
    except KeyError as e:
        raise SignatureVerifyException("Missing signature for %s, %s" %
                                       (signature_name, key_id)) from e

    try:
        signature = decode_base64(signature_b64)
    except Exception as e:
        raise SignatureVerifyException("Invalid signature base64 for %s, %s" %
                                       (signature_name, key_id)) from e

    # Signatures are computed over the canonical JSON with the 'signatures'
    # and 'unsigned' members removed.
    json_object_copy = dict(json_object)
    del json_object_copy["signatures"]
    json_object_copy.pop("unsigned", None)

    message = encode_canonical_json(json_object_copy)

    # logger.debug("VERIFY: %s %s %s", signature_name, key_id, message)

    try:
        verify_key.verify(message, signature)
    except Exception as e:
        # Chain the underlying crypto error for easier debugging.
        raise SignatureVerifyException(
            "Unable to verify signature for %s: %s %s" % (
                signature_name,
                type(e),
                e,
            )) from e
Exemple #27
0
def read_old_signing_keys(stream):
    """Reads a list of old keys from a stream
    Args:
        stream : A stream to iterate for keys.
    Returns:
        list of VerifyKey objects.
    """
    keys = []
    # Each line is: "<algorithm> <version> <expired_ts> <base64 key>".
    for line in stream:
        algorithm, version, expired, key_base64 = line.split()
        key = decode_verify_key_bytes(
            "%s:%s" % (algorithm, version,), decode_base64(key_base64)
        )
        key.expired = int(expired)
        keys.append(key)
    return keys
Exemple #28
0
def decode_signing_key_base64(algorithm, version, key_base64):
    """Decode a base64 encoded signing key.

    Args:
        algorithm (str): The algorithm the key is for (currently "ed25519").
        version (str): Identifies this key out of the keys for this entity.
        key_base64 (str): Base64 encoded bytes of the key.
    Returns:
        A SigningKey object, annotated with its version and algorithm.
    Raises:
        ValueError: if the algorithm is not supported.
    """
    # Guard clause: reject anything that is not ed25519 up front.
    if algorithm != NACL_ED25519:
        raise ValueError("Unsupported algorithm %s" % (algorithm,))
    signing_key = nacl.signing.SigningKey(decode_base64(key_base64))
    signing_key.version = version
    signing_key.alg = NACL_ED25519
    return signing_key
Exemple #29
0
 def _verify_any_signature(self, data, server_hostname):
     """Verify that ``data`` carries at least one valid signature from
     ``server_hostname``.

     Fetches each named public key from the identity server over HTTP and
     accepts as soon as one signature verifies.

     Args:
         data (dict): signed JSON object to check.
         server_hostname (str): the server whose signature is required.

     Raises:
         AuthError: if no signature block exists for the server, or a named
             key is returned without a "public_key" field.
     """
     if server_hostname not in data["signatures"]:
         raise AuthError(401, "No signature from server %s" % (server_hostname,))
     for key_name, signature in data["signatures"][server_hostname].items():
         key_data = yield self.simple_http_client.get_json(
             "%s%s/_matrix/identity/api/v1/pubkey/%s" %
             (id_server_scheme, server_hostname, key_name,),
         )
         if "public_key" not in key_data:
             raise AuthError(401, "No public key named %s from %s" %
                             (key_name, server_hostname,))
         verify_signed_json(
             data,
             server_hostname,
             decode_verify_key_bytes(key_name, decode_base64(key_data["public_key"]))
         )
         # verify_signed_json raises on failure, so reaching this point
         # means one signature checked out; stop after the first success.
         return
Exemple #30
0
    def __init__(self, sydent, server_name, port, pubkeys):
        """A remote peer that this instance replicates to over HTTPS.

        Args:
            sydent: the parent Sydent object (supplies configuration).
            server_name: the peer's server name.
            port: replication port; defaults to 1001 when no explicit
                base_replication_url is configured.
            pubkeys: mapping of algorithm name to the peer's public key,
                either hex or base64 encoded.

        Raises:
            ConfigError: if the public key is neither valid hex nor valid
                base64.
        """
        super(RemotePeer, self).__init__(server_name, pubkeys)
        self.sydent = sydent

        # look up or build the replication URL
        try:
            replication_url = sydent.cfg.get(
                "peer.%s" % server_name, "base_replication_url",
            )
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            if not port:
                port = 1001
            replication_url = "https://%s:%i" % (server_name, port)

        if replication_url[-1:] != '/':
            replication_url += "/"

        replication_url += "_matrix/identity/replicate/v1/push"
        self.replication_url = replication_url

        # Get verify key for this peer

        # Check if their key is base64 or hex encoded
        pubkey = self.pubkeys[SIGNING_KEY_ALGORITHM]
        try:
            # Check for hex encoding
            int(pubkey, 16)

            # Decode hex into bytes
            # NOTE(review): an odd-length hex string passes the int() probe
            # above but makes unhexlify raise binascii.Error, which is not
            # caught here — confirm upstream validation guarantees even length.
            pubkey_decoded = binascii.unhexlify(pubkey)

            logger.warn("Peer public key of %s is hex encoded. Please update to base64 encoding", server_name)
        except ValueError:
            # Check for base64 encoding
            try:
                pubkey_decoded = decode_base64(pubkey)
            except Exception as e:
                raise ConfigError(
                    "Unable to decode public key for peer %s: %s" % (server_name, e),
                )

        # Key id is "<algorithm>:" — the version part is deliberately empty.
        self.verify_key = signedjson.key.decode_verify_key_bytes(SIGNING_KEY_ALGORITHM + ":", pubkey_decoded)

        # Attach metadata
        self.verify_key.alg = SIGNING_KEY_ALGORITHM
        self.verify_key.version = 0
Exemple #31
0
 def verify_any_signature(self, data, server_hostname):
     """Verify that ``data`` carries at least one valid signature from
     ``server_hostname``, fetching public keys from the identity server.

     Args:
         data (dict): signed JSON object to check.
         server_hostname (str): the server whose signature is required.

     Raises:
         AuthError: if no signature block exists for the server, or a named
             key is returned without a "public_key" field.
     """
     if server_hostname not in data["signatures"]:
         raise AuthError(401, "No signature from server %s" % (server_hostname,))
     for key_name, signature in data["signatures"][server_hostname].items():
         key_data = yield self.hs.get_simple_http_client().get_json(
             "%s%s/_matrix/identity/api/v1/pubkey/%s" %
             (id_server_scheme, server_hostname, key_name,),
         )
         if "public_key" not in key_data:
             raise AuthError(401, "No public key named %s from %s" %
                             (key_name, server_hostname,))
         verify_signed_json(
             data,
             server_hostname,
             decode_verify_key_bytes(key_name, decode_base64(key_data["public_key"]))
         )
         # verify_signed_json raises on failure; first success ends the check.
         return
def decode_signing_key_base64(algorithm, version, key_base64):
    """Decode a base64 encoded signing key.

    Args:
        algorithm (str): The algorithm the key is for (currently "ed25519").
        version (str): Identifies this key out of the keys for this entity.
        key_base64 (str): Base64 encoded bytes of the key.
    Returns:
        A SigningKey object carrying ``version`` and ``alg`` attributes.
    Raises:
        ValueError: for any algorithm other than ed25519.
    """
    if algorithm == NACL_ED25519:
        raw_bytes = decode_base64(key_base64)
        signing_key = nacl.signing.SigningKey(raw_bytes)
        signing_key.version = version
        signing_key.alg = NACL_ED25519
        return signing_key
    raise ValueError("Unsupported algorithm %s" % (algorithm, ))
Exemple #33
0
    def verifyServerSignedJson(self, signed_json, acceptable_server_names=None):
        """Given a signed json object, try to verify any one
        of the signatures on it

        XXX: This contains a fairly noddy version of the home server
        SRV lookup and signature verification. It does no caching (just
        fetches the signature each time and does not contact any other
        servers to do perspective checks).

        :param signed_json: the signed JSON object to check
        :param acceptable_server_names: If provided and not None,
        only signatures from servers in this list will be accepted.
        :type acceptable_server_names: list of strings

        :return a tuple of the server name and key name that was
        successfully verified. If the json cannot be verified,
        raises SignatureVerifyException.
        """
        if 'signatures' not in signed_json:
            raise SignatureVerifyException("Signature missing")
        for server_name, sigs in signed_json['signatures'].items():
            if acceptable_server_names is not None:
                if server_name not in acceptable_server_names:
                    continue

            server_keys = yield self._getKeysForServer(server_name)
            for key_name, sig in sigs.items():
                if key_name in server_keys:
                    if 'key' not in server_keys[key_name]:
                        # BUG FIX: the format string previously had no
                        # argument, so the literal "%s" was logged.
                        logger.warn("Ignoring key %s with no 'key'", key_name)
                        continue
                    key_bytes = decode_base64(server_keys[key_name]['key'])
                    verify_key = signedjson.key.decode_verify_key_bytes(key_name, key_bytes)
                    logger.info("verifying sig from key %r", key_name)
                    # Raises on failure; success falls through to returnValue.
                    signedjson.sign.verify_signed_json(signed_json, server_name, verify_key)
                    logger.info("Verified signature with key %s from %s", key_name, server_name)
                    defer.returnValue((server_name, key_name))
            logger.warn(
                "No matching key found for signature block %r in server keys %r",
                signed_json['signatures'], server_keys,
            )
        logger.warn(
            "Unable to verify any signatures from block %r. Acceptable server names: %r",
            signed_json['signatures'], acceptable_server_names,
        )
        raise SignatureVerifyException("No matching signature found")
Exemple #34
0
 def read_old_signing_keys(
     self, old_signing_keys: Optional[JsonDict]
 ) -> Dict[str, "VerifyKeyWithExpiry"]:
     """Build verify keys from the "old_signing_keys" config section.

     Args:
         old_signing_keys: the parsed config section, or None if absent.

     Returns:
         Dict of key_id to a VerifyKey annotated with its expiry timestamp.

     Raises:
         ConfigError: if a key_id uses an unsupported signing algorithm.
     """
     if old_signing_keys is None:
         return {}
     keys = {}
     for key_id, key_data in old_signing_keys.items():
         if is_signing_algorithm_supported(key_id):
             key_base64 = key_data["key"]
             key_bytes = decode_base64(key_base64)
             verify_key: "VerifyKeyWithExpiry" = decode_verify_key_bytes(
                 key_id, key_bytes)  # type: ignore[assignment]
             # Expiry is carried as an extra attribute on the key object.
             verify_key.expired = key_data["expired_ts"]
             keys[key_id] = verify_key
         else:
             raise ConfigError(
                 "Unsupported signing algorithm for old key: %r" %
                 (key_id, ))
     return keys
Exemple #35
0
def get_verify_key_from_cross_signing_key(key_info):
    """Get the key ID and signedjson verify key from a cross-signing key dict

    Args:
        key_info (dict): a cross-signing key dict, which must have a "keys"
            property that has exactly one item in it

    Returns:
        (str, VerifyKey): the key ID and verify key for the cross-signing key

    Raises:
        ValueError: if "keys" is missing or does not hold exactly one key.
    """
    try:
        keys = key_info["keys"]
    except KeyError:
        raise ValueError("Invalid key")
    if len(keys) != 1:
        raise ValueError("Invalid key")
    # Exactly one entry: pull it out directly.
    key_id, key_data = next(iter(keys.items()))
    return key_id, decode_verify_key_bytes(key_id, decode_base64(key_data))
Exemple #36
0
def test_encrypt_decrypt():
    """Round-trip and tamper-detection checks for encrypt()/decrypt()."""
    plaintext = b'test'
    passphrase = 'pass'
    # A single KDF round keeps this test fast.
    ciphertext = encrypt(plaintext, passphrase, count=1)

    assert decrypt(ciphertext, passphrase) == plaintext

    raw = decode_base64(ciphertext)

    # Corrupt the trailing 32-byte HMAC: decryption must be rejected.
    with pytest.raises(ValueError):
        decrypt(encode_base64(raw[:-32] + b'A' * 32), passphrase)

    # Corrupt the leading version byte: decryption must be rejected.
    with pytest.raises(ValueError):
        decrypt(encode_base64(bytes([42]) + raw[1:]), passphrase)
Exemple #37
0
    def _check_master_key_signature(
        self, user_id, master_key_id, signed_master_key, stored_master_key, devices
    ):
        """Check signatures of a user's master key made by their devices.

        Args:
            user_id (string): the user whose master key is being checked
            master_key_id (string): the ID of the user's master key
            signed_master_key (dict): the user's signed master key that was uploaded
            stored_master_key (dict): our previously-stored copy of the user's master key
            devices (iterable(dict)): the user's devices

        Returns:
            list[SignatureListItem]: a list of signatures to store

        Raises:
            SynapseError: if a signature is invalid
        """
        # for each device that signed the master key, check the signature.
        master_key_signature_list = []
        sigs = signed_master_key["signatures"]
        for signing_key_id, signature in sigs[user_id].items():
            # Key ids look like "<algorithm>:<device_id>"; split off the device.
            _, signing_device_id = signing_key_id.split(":", 1)
            if (
                signing_device_id not in devices
                or signing_key_id not in devices[signing_device_id]["keys"]
            ):
                # signed by an unknown device, or the
                # device does not have the key
                raise SynapseError(400, "Invalid signature", Codes.INVALID_SIGNATURE)

            # get the key and check the signature
            pubkey = devices[signing_device_id]["keys"][signing_key_id]
            verify_key = decode_verify_key_bytes(signing_key_id, decode_base64(pubkey))
            # Raises SynapseError if the signature does not verify.
            _check_device_signature(
                user_id, verify_key, signed_master_key, stored_master_key
            )

            master_key_signature_list.append(
                SignatureListItem(signing_key_id, user_id, master_key_id, signature)
            )

        return master_key_signature_list
Exemple #38
0
def verify_signed_json(json_object, signature_name, verify_key):
    """Check a signature on a signed JSON object.

    The "signatures" and "unsigned" members are stripped before the object
    is canonicalised and checked, per the signing-JSON convention.

    Args:
        json_object (dict): The signed JSON object to check.
        signature_name (str): The name of the signature to check.
        verify_key (syutil.crypto.VerifyKey): The key to verify the signature.

    Raises:
        SignatureVerifyException: If the signature isn't valid.
    """

    try:
        signatures = json_object["signatures"]
    except KeyError:
        raise SignatureVerifyException("No signatures on this object")

    key_id = "%s:%s" % (verify_key.alg, verify_key.version)

    # Narrowed from bare except: a missing entry raises KeyError; a
    # malformed signatures block (non-dict) raises TypeError.
    try:
        signature_b64 = signatures[signature_name][key_id]
    except (KeyError, TypeError):
        raise SignatureVerifyException("Missing signature for %s, %s" % (signature_name, key_id))

    try:
        signature = decode_base64(signature_b64)
    except Exception:
        raise SignatureVerifyException("Invalid signature base64 for %s, %s" % (signature_name, key_id))

    json_object_copy = dict(json_object)
    del json_object_copy["signatures"]
    json_object_copy.pop("unsigned", None)

    message = encode_canonical_json(json_object_copy)

    # logger.debug("VERIFY: %s %s %s", signature_name, key_id, message)

    try:
        verify_key.verify(message, signature)
    except Exception:
        logger.exception("Error verifying signature")
        raise SignatureVerifyException("Unable to verify signature for %s " % signature_name)
Exemple #39
0
def get_verify_key_from_cross_signing_key(
        key_info: Mapping[str, Any]) -> Tuple[str, VerifyKey]:
    """Get the key ID and signedjson verify key from a cross-signing key dict

    Args:
        key_info: a cross-signing key dict, which must have a "keys"
            property that has exactly one item in it

    Returns:
        the key ID and verify key for the cross-signing key

    Raises:
        ValueError: if "keys" is absent or holds other than one key.
    """
    # Guard clauses: the dict must carry a "keys" member with one entry.
    if "keys" not in key_info:
        raise ValueError("Invalid key")
    keys = key_info["keys"]
    if len(keys) != 1:
        raise ValueError("Invalid key")
    key_id, key_data = next(iter(keys.items()))
    return key_id, decode_verify_key_bytes(key_id, decode_base64(key_data))
Exemple #40
0
 def _verify_any_signature(self, data, server_hostname):
     """Verify that ``data`` carries at least one valid signature from
     ``server_hostname``, fetching keys via the blacklisting HTTP client.

     Args:
         data (dict): signed JSON object to check.
         server_hostname (str): the server whose signature is required.

     Raises:
         AuthError: no signature block, or a key without "public_key".
         SynapseError: the identity server timed out.
     """
     if server_hostname not in data["signatures"]:
         raise AuthError(
             401, "No signature from server %s" % (server_hostname, ))
     for key_name, signature in data["signatures"][server_hostname].items():
         try:
             key_data = yield self.blacklisting_http_client.get_json(
                 "%s%s/_matrix/identity/api/v1/pubkey/%s" %
                 (id_server_scheme, server_hostname, key_name))
         except TimeoutError:
             raise SynapseError(500, "Timed out contacting identity server")
         if "public_key" not in key_data:
             raise AuthError(
                 401, "No public key named %s from %s" %
                 (key_name, server_hostname))
         verify_signed_json(
             data,
             server_hostname,
             decode_verify_key_bytes(key_name,
                                     decode_base64(key_data["public_key"])),
         )
         # verify_signed_json raises on failure; first success ends the check.
         return
Exemple #41
0
def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
    """Check whether the hash for this PDU matches the contents.

    Args:
        event: the event whose content hash is being verified.
        hash_algorithm: hashlib constructor used to recompute the hash.

    Returns:
        bool: True if the stored hash matches the recomputed one.

    Raises:
        SynapseError: if the algorithm is absent from the event's hashes
            or the stored hash is not valid base64.
    """
    name, expected_hash = compute_content_hash(event, hash_algorithm)
    logger.debug("Expecting hash: %s", encode_base64(expected_hash))
    if name not in event.hashes:
        raise SynapseError(
            400,
            "Algorithm %s not in hashes %s" % (
                name, list(event.hashes),
            ),
            Codes.UNAUTHORIZED,
        )
    message_hash_base64 = event.hashes[name]
    # Narrowed from bare except: only decoding failures should map to a
    # 400 response, not SystemExit/KeyboardInterrupt.
    try:
        message_hash_bytes = decode_base64(message_hash_base64)
    except Exception:
        raise SynapseError(
            400,
            "Invalid base64: %s" % (message_hash_base64,),
            Codes.UNAUTHORIZED,
        )
    return message_hash_bytes == expected_hash
Exemple #42
0
    def process_v2_response(self, from_server, response_json,
                            requested_ids=None, only_from_server=True):
        """Process a v2 key-query response: decode the verify keys, check
        the response's own signatures, re-sign it with our key, and persist
        the signed JSON for every updated key id.

        Args:
            from_server (str): the server the response was fetched from.
            response_json (dict): the parsed v2 key response.
            requested_ids (iterable[str]|None): key ids originally asked
                for; stored even if absent from the response.
            only_from_server (bool): if True, reject responses whose
                "server_name" differs from ``from_server``.

        Returns:
            Deferred resolving to {server_name: {key_id: VerifyKey}}.

        Raises:
            ValueError: on a mismatched server name or a signature made
                with a key the response does not include.
        """
        # BUG FIX: was a mutable default argument (requested_ids=[]);
        # use a None sentinel instead.
        if requested_ids is None:
            requested_ids = []
        time_now_ms = self.clock.time_msec()
        response_keys = {}
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.time_added = time_now_ms
                verify_keys[key_id] = verify_key

        old_verify_keys = {}
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.expired = key_data["expired_ts"]
                verify_key.time_added = time_now_ms
                old_verify_keys[key_id] = verify_key

        results = {}
        server_name = response_json["server_name"]
        if only_from_server:
            if server_name != from_server:
                raise ValueError(
                    "Expected a response for server %r not %r" % (
                        from_server, server_name
                    )
                )
        for key_id in response_json["signatures"].get(server_name, {}):
            if key_id not in response_json["verify_keys"]:
                raise ValueError(
                    "Key response must include verification keys for all"
                    " signatures"
                )
            if key_id in verify_keys:
                verify_signed_json(
                    response_json,
                    server_name,
                    verify_keys[key_id]
                )

        # Counter-sign the response with our own key before storing it.
        signed_key_json = sign_json(
            response_json,
            self.config.server_name,
            self.config.signing_key[0],
        )

        signed_key_json_bytes = encode_canonical_json(signed_key_json)
        ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

        # Persist every key we asked for or received, current and old.
        updated_key_ids = set(requested_ids)
        updated_key_ids.update(verify_keys)
        updated_key_ids.update(old_verify_keys)

        response_keys.update(verify_keys)
        response_keys.update(old_verify_keys)

        yield defer.gatherResults(
            [
                preserve_fn(self.store.store_server_keys_json)(
                    server_name=server_name,
                    key_id=key_id,
                    from_server=server_name,
                    ts_now_ms=time_now_ms,
                    ts_expires_ms=ts_valid_until_ms,
                    key_json_bytes=signed_key_json_bytes,
                )
                for key_id in updated_key_ids
            ],
            consumeErrors=True,
        ).addErrback(unwrapFirstError)

        results[server_name] = response_keys

        defer.returnValue(results)
Exemple #43
0
    def search(self, user, content, batch=None):
        """Performs a full text search for a user.

        Args:
            user (UserID)
            content (dict): Search parameters
            batch (str): The next_batch parameter. Used for pagination.

        Returns:
            dict to be returned to the client with results of search

        Raises:
            SynapseError: if search is disabled, the batch token or search
                query is malformed, or an unknown order_by/group key is used.
        """

        if not self.hs.config.enable_search:
            raise SynapseError(400, "Search is disabled on this homeserver")

        # A batch token encodes "group\ngroup_key\npagination_token" in base64.
        batch_group = None
        batch_group_key = None
        batch_token = None
        if batch:
            try:
                b = decode_base64(batch).decode('ascii')
                batch_group, batch_group_key, batch_token = b.split("\n")

                assert batch_group is not None
                assert batch_group_key is not None
                assert batch_token is not None
            except Exception:
                raise SynapseError(400, "Invalid batch")

        logger.info(
            "Search batch properties: %r, %r, %r",
            batch_group, batch_group_key, batch_token,
        )

        logger.info("Search content: %s", content)

        try:
            room_cat = content["search_categories"]["room_events"]

            # The actual thing to query in FTS
            search_term = room_cat["search_term"]

            # Which "keys" to search over in FTS query
            keys = room_cat.get("keys", [
                "content.body", "content.name", "content.topic",
            ])

            # Filter to apply to results
            filter_dict = room_cat.get("filter", {})

            # What to order results by (impacts whether pagination can be done)
            order_by = room_cat.get("order_by", "rank")

            # Return the current state of the rooms?
            include_state = room_cat.get("include_state", False)

            # Include context around each event?
            event_context = room_cat.get(
                "event_context", None
            )

            # Group results together? May allow clients to paginate within a
            # group
            group_by = room_cat.get("groupings", {}).get("group_by", {})
            group_keys = [g["key"] for g in group_by]

            if event_context is not None:
                before_limit = int(event_context.get(
                    "before_limit", 5
                ))
                after_limit = int(event_context.get(
                    "after_limit", 5
                ))

                # Return the historic display name and avatar for the senders
                # of the events?
                include_profile = bool(event_context.get("include_profile", False))
        except KeyError:
            raise SynapseError(400, "Invalid search query")

        if order_by not in ("rank", "recent"):
            raise SynapseError(400, "Invalid order by: %r" % (order_by,))

        if set(group_keys) - {"room_id", "sender"}:
            raise SynapseError(
                400,
                "Invalid group by keys: %r" % (set(group_keys) - {"room_id", "sender"},)
            )

        search_filter = Filter(filter_dict)

        # TODO: Search through left rooms too
        rooms = yield self.store.get_rooms_for_user_where_membership_is(
            user.to_string(),
            membership_list=[Membership.JOIN],
            # membership_list=[Membership.JOIN, Membership.LEAVE, Membership.Ban],
        )
        room_ids = set(r.room_id for r in rooms)

        # If doing a subset of all rooms search, check if any of the rooms
        # are from an upgraded room, and search their contents as well
        if search_filter.rooms:
            historical_room_ids = []
            for room_id in search_filter.rooms:
                # Add any previous rooms to the search if they exist
                ids = yield self.get_old_rooms_from_upgraded_room(room_id)
                historical_room_ids += ids

            # Prevent any historical events from being filtered
            search_filter = search_filter.with_room_ids(historical_room_ids)

        room_ids = search_filter.filter_rooms(room_ids)

        if batch_group == "room_id":
            room_ids.intersection_update({batch_group_key})

        # No rooms to search: short-circuit with an empty result set.
        if not room_ids:
            defer.returnValue({
                "search_categories": {
                    "room_events": {
                        "results": [],
                        "count": 0,
                        "highlights": [],
                    }
                }
            })

        rank_map = {}  # event_id -> rank of event
        allowed_events = []
        room_groups = {}  # Holds result of grouping by room, if applicable
        sender_group = {}  # Holds result of grouping by sender, if applicable

        # Holds the next_batch for the entire result set if one of those exists
        global_next_batch = None

        highlights = set()

        count = None

        if order_by == "rank":
            # Rank ordering: one query, no pagination support.
            search_result = yield self.store.search_msgs(
                room_ids, search_term, keys
            )

            count = search_result["count"]

            if search_result["highlights"]:
                highlights.update(search_result["highlights"])

            results = search_result["results"]

            results_map = {r["event"].event_id: r for r in results}

            rank_map.update({r["event"].event_id: r["rank"] for r in results})

            filtered_events = search_filter.filter([r["event"] for r in results])

            events = yield filter_events_for_client(
                self.store, user.to_string(), filtered_events
            )

            events.sort(key=lambda e: -rank_map[e.event_id])
            allowed_events = events[:search_filter.limit()]

            for e in allowed_events:
                rm = room_groups.setdefault(e.room_id, {
                    "results": [],
                    "order": rank_map[e.event_id],
                })
                rm["results"].append(e.event_id)

                s = sender_group.setdefault(e.sender, {
                    "results": [],
                    "order": rank_map[e.event_id],
                })
                s["results"].append(e.event_id)

        elif order_by == "recent":
            # Recency ordering: paginated, so loop fetching pages until we
            # fill the limit or give up.
            room_events = []
            i = 0

            pagination_token = batch_token

            # We keep looping and we keep filtering until we reach the limit
            # or we run out of things.
            # But only go around 5 times since otherwise synapse will be sad.
            while len(room_events) < search_filter.limit() and i < 5:
                i += 1
                search_result = yield self.store.search_rooms(
                    room_ids, search_term, keys, search_filter.limit() * 2,
                    pagination_token=pagination_token,
                )

                if search_result["highlights"]:
                    highlights.update(search_result["highlights"])

                count = search_result["count"]

                results = search_result["results"]

                results_map = {r["event"].event_id: r for r in results}

                rank_map.update({r["event"].event_id: r["rank"] for r in results})

                filtered_events = search_filter.filter([
                    r["event"] for r in results
                ])

                events = yield filter_events_for_client(
                    self.store, user.to_string(), filtered_events
                )

                room_events.extend(events)
                room_events = room_events[:search_filter.limit()]

                if len(results) < search_filter.limit() * 2:
                    pagination_token = None
                    break
                else:
                    pagination_token = results[-1]["pagination_token"]

            for event in room_events:
                group = room_groups.setdefault(event.room_id, {
                    "results": [],
                })
                group["results"].append(event.event_id)

            if room_events and len(room_events) >= search_filter.limit():
                last_event_id = room_events[-1].event_id
                pagination_token = results_map[last_event_id]["pagination_token"]

                # We want to respect the given batch group and group keys so
                # that if people blindly use the top level `next_batch` token
                # it returns more from the same group (if applicable) rather
                # than reverting to searching all results again.
                if batch_group and batch_group_key:
                    global_next_batch = encode_base64(("%s\n%s\n%s" % (
                        batch_group, batch_group_key, pagination_token
                    )).encode('ascii'))
                else:
                    global_next_batch = encode_base64(("%s\n%s\n%s" % (
                        "all", "", pagination_token
                    )).encode('ascii'))

                for room_id, group in room_groups.items():
                    group["next_batch"] = encode_base64(("%s\n%s\n%s" % (
                        "room_id", room_id, pagination_token
                    )).encode('ascii'))

            allowed_events.extend(room_events)

        else:
            # We should never get here due to the guard earlier.
            raise NotImplementedError()

        logger.info("Found %d events to return", len(allowed_events))

        # If client has asked for "context" for each event (i.e. some surrounding
        # events and state), fetch that
        if event_context is not None:
            now_token = yield self.hs.get_event_sources().get_current_token()

            contexts = {}
            for event in allowed_events:
                res = yield self.store.get_events_around(
                    event.room_id, event.event_id, before_limit, after_limit,
                )

                logger.info(
                    "Context for search returned %d and %d events",
                    len(res["events_before"]), len(res["events_after"]),
                )

                res["events_before"] = yield filter_events_for_client(
                    self.store, user.to_string(), res["events_before"]
                )

                res["events_after"] = yield filter_events_for_client(
                    self.store, user.to_string(), res["events_after"]
                )

                res["start"] = now_token.copy_and_replace(
                    "room_key", res["start"]
                ).to_string()

                res["end"] = now_token.copy_and_replace(
                    "room_key", res["end"]
                ).to_string()

                if include_profile:
                    senders = set(
                        ev.sender
                        for ev in itertools.chain(
                            res["events_before"], [event], res["events_after"]
                        )
                    )

                    if res["events_after"]:
                        last_event_id = res["events_after"][-1].event_id
                    else:
                        last_event_id = event.event_id

                    state_filter = StateFilter.from_types(
                        [(EventTypes.Member, sender) for sender in senders]
                    )

                    state = yield self.store.get_state_for_event(
                        last_event_id, state_filter
                    )

                    res["profile_info"] = {
                        s.state_key: {
                            "displayname": s.content.get("displayname", None),
                            "avatar_url": s.content.get("avatar_url", None),
                        }
                        for s in state.values()
                        if s.type == EventTypes.Member and s.state_key in senders
                    }

                contexts[event.event_id] = res
        else:
            contexts = {}

        # TODO: Add a limit

        time_now = self.clock.time_msec()

        for context in contexts.values():
            context["events_before"] = (
                yield self._event_serializer.serialize_events(
                    context["events_before"], time_now,
                )
            )
            context["events_after"] = (
                yield self._event_serializer.serialize_events(
                    context["events_after"], time_now,
                )
            )

        state_results = {}
        if include_state:
            rooms = set(e.room_id for e in allowed_events)
            for room_id in rooms:
                state = yield self.state_handler.get_current_state(room_id)
                state_results[room_id] = list(state.values())

            state_results.values()

        # We're now about to serialize the events. We should not make any
        # blocking calls after this. Otherwise the 'age' will be wrong
        results = []
        for e in allowed_events:
            results.append({
                "rank": rank_map[e.event_id],
                "result": (yield self._event_serializer.serialize_event(e, time_now)),
                "context": contexts.get(e.event_id, {}),
            })

        rooms_cat_res = {
            "results": results,
            "count": count,
            "highlights": list(highlights),
        }

        if state_results:
            s = {}
            for room_id, state in state_results.items():
                s[room_id] = yield self._event_serializer.serialize_events(
                    state, time_now,
                )

            rooms_cat_res["state"] = s

        if room_groups and "room_id" in group_keys:
            rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups

        if sender_group and "sender" in group_keys:
            rooms_cat_res.setdefault("groups", {})["sender"] = sender_group

        if global_next_batch:
            rooms_cat_res["next_batch"] = global_next_batch

        defer.returnValue({
            "search_categories": {
                "room_events": rooms_cat_res
            }
        })
Exemple #44
0
    def get_server_verify_key_v1_direct(self, server_name, key_ids):
        """Finds a verification key for the server with one of the key ids.

        Fetches the v1 key response directly from the remote server over TLS,
        checks that the TLS certificate in the response matches the one used
        for the connection, verifies the response's self-signatures, and
        caches both the certificate and the keys.

        Args:
            server_name (str): The name of the server to fetch a key for.
            keys_ids (list of str): The key_ids to check for.

        Returns:
            Deferred resolving to a dict of key_id -> VerifyKey.

        Raises:
            ValueError: if the response is unsigned, missing its TLS
                certificate, the certificate doesn't match the connection,
                or a signature references a key not in the response.
        """

        # Try to fetch the key from the remote server.

        (response, tls_certificate) = yield fetch_server_key(
            server_name, self.hs.tls_server_context_factory
        )

        # Check the response.

        x509_certificate_bytes = crypto.dump_certificate(
            crypto.FILETYPE_ASN1, tls_certificate
        )

        if ("signatures" not in response
                or server_name not in response["signatures"]):
            raise ValueError("Key response not signed by remote server")

        if "tls_certificate" not in response:
            raise ValueError("Key response missing TLS certificate")

        tls_certificate_b64 = response["tls_certificate"]

        # The certificate in the body must be the one the connection used.
        if encode_base64(x509_certificate_bytes) != tls_certificate_b64:
            raise ValueError("TLS certificate doesn't match")

        # Cache the result in the datastore.

        time_now_ms = self.clock.time_msec()

        verify_keys = {}
        for key_id, key_base64 in response["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.time_added = time_now_ms
                verify_keys[key_id] = verify_key

        for key_id in response["signatures"][server_name]:
            if key_id not in response["verify_keys"]:
                raise ValueError(
                    "Key response must include verification keys for all"
                    " signatures"
                )
            if key_id in verify_keys:
                verify_signed_json(
                    response,
                    server_name,
                    verify_keys[key_id]
                )

        yield self.store.store_server_certificate(
            server_name,
            server_name,
            time_now_ms,
            tls_certificate,
        )

        yield self.store_keys(
            server_name=server_name,
            from_server=server_name,
            verify_keys=verify_keys,
        )

        defer.returnValue(verify_keys)
 def test_decode_urlunsafe_chars(self):
     """decode_base64 must accept both the standard ('/', '+') and
     URL-safe ('_', '-') alphabets, mapping them to the same bytes."""
     self.assertEqual(decode_base64(u'/+aa'), b'\xff\xe6\x9a')
     self.assertEqual(decode_base64(u'_-aa'), b'\xff\xe6\x9a')
Exemple #46
0
def select_pdus(cursor):
    cursor.execute(
        "SELECT pdu_id, origin FROM pdus ORDER BY depth ASC"
    )

    ids = cursor.fetchall()

    pdu_tuples = store._get_pdu_tuples(cursor, ids)

    pdus = [Pdu.from_pdu_tuple(p) for p in pdu_tuples]

    reference_hashes = {}

    for pdu in pdus:
        try:
            if pdu.prev_pdus:
                print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
                for pdu_id, origin, hashes in pdu.prev_pdus:
                    ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
                    hashes[ref_alg] = encode_base64(ref_hsh)
                    store._store_prev_pdu_hash_txn(cursor,  pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh)
                print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
            pdu = add_event_pdu_content_hash(pdu)
            ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
            reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
            store._store_pdu_reference_hash_txn(cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh)

            for alg, hsh_base64 in pdu.hashes.items():
                print alg, hsh_base64
                store._store_pdu_content_hash_txn(cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64))

        except:
            print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus
Exemple #47
0
    def search(self, user, content, batch=None):
        """Performs a full text search for a user.

        Args:
            user (UserID)
            content (dict): Search parameters
            batch (str): The next_batch parameter. Used for pagination.

        Returns:
            dict to be returned to the client with results of search

        Raises:
            SynapseError: if the batch token is malformed or the search
                query is invalid.
        """

        # A batch token is the base64 of "<group>\n<group_key>\n<token>",
        # as produced below when a room group has more results to page.
        batch_group = None
        batch_group_key = None
        batch_token = None
        if batch:
            try:
                b = decode_base64(batch)
                batch_group, batch_group_key, batch_token = b.split("\n")

                assert batch_group is not None
                assert batch_group_key is not None
                assert batch_token is not None
            except Exception:
                # Bad base64 or the wrong number of "\n"-separated fields.
                # (Was a bare `except:`, which also caught SystemExit etc.)
                raise SynapseError(400, "Invalid batch")

        try:
            room_cat = content["search_categories"]["room_events"]

            # The actual thing to query in FTS
            search_term = room_cat["search_term"]

            # Which "keys" to search over in FTS query
            keys = room_cat.get("keys", [
                "content.body", "content.name", "content.topic",
            ])

            # Filter to apply to results
            filter_dict = room_cat.get("filter", {})

            # What to order results by (impacts whether pagination can be done)
            order_by = room_cat.get("order_by", "rank")

            # Return the current state of the rooms?
            include_state = room_cat.get("include_state", False)

            # Include context around each event?
            event_context = room_cat.get(
                "event_context", None
            )

            # Group results together? May allow clients to paginate within a
            # group
            group_by = room_cat.get("groupings", {}).get("group_by", {})
            group_keys = [g["key"] for g in group_by]

            if event_context is not None:
                before_limit = int(event_context.get(
                    "before_limit", 5
                ))
                after_limit = int(event_context.get(
                    "after_limit", 5
                ))

                # Return the historic display name and avatar for the senders
                # of the events?
                include_profile = bool(event_context.get("include_profile", False))
        except KeyError:
            raise SynapseError(400, "Invalid search query")

        if order_by not in ("rank", "recent"):
            raise SynapseError(400, "Invalid order by: %r" % (order_by,))

        if set(group_keys) - {"room_id", "sender"}:
            raise SynapseError(
                400,
                "Invalid group by keys: %r" % (set(group_keys) - {"room_id", "sender"},)
            )

        search_filter = Filter(filter_dict)

        # TODO: Search through left rooms too
        rooms = yield self.store.get_rooms_for_user_where_membership_is(
            user.to_string(),
            membership_list=[Membership.JOIN],
            # membership_list=[Membership.JOIN, Membership.LEAVE, Membership.Ban],
        )
        room_ids = set(r.room_id for r in rooms)

        room_ids = search_filter.filter_rooms(room_ids)

        # If the pagination token was scoped to one room, restrict to it.
        if batch_group == "room_id":
            room_ids.intersection_update({batch_group_key})

        rank_map = {}  # event_id -> rank of event
        allowed_events = []
        room_groups = {}  # Holds result of grouping by room, if applicable
        sender_group = {}  # Holds result of grouping by sender, if applicable

        # Holds the next_batch for the entire result set if one of those exists
        global_next_batch = None

        if order_by == "rank":
            results = yield self.store.search_msgs(
                room_ids, search_term, keys
            )

            results_map = {r["event"].event_id: r for r in results}

            rank_map.update({r["event"].event_id: r["rank"] for r in results})

            filtered_events = search_filter.filter([r["event"] for r in results])

            events = yield self._filter_events_for_client(
                user.to_string(), filtered_events
            )

            # Highest rank first, then truncate to the client's limit.
            events.sort(key=lambda e: -rank_map[e.event_id])
            allowed_events = events[:search_filter.limit()]

            for e in allowed_events:
                rm = room_groups.setdefault(e.room_id, {
                    "results": [],
                    "order": rank_map[e.event_id],
                })
                rm["results"].append(e.event_id)

                s = sender_group.setdefault(e.sender, {
                    "results": [],
                    "order": rank_map[e.event_id],
                })
                s["results"].append(e.event_id)

        elif order_by == "recent":
            # In this case we specifically loop through each room as the given
            # limit applies to each room, rather than a global list.
            # This is not necessarily a good idea.
            for room_id in room_ids:
                room_events = []
                if batch_group == "room_id" and batch_group_key == room_id:
                    pagination_token = batch_token
                else:
                    pagination_token = None
                i = 0

                # We keep looping and we keep filtering until we reach the limit
                # or we run out of things.
                # But only go around 5 times since otherwise synapse will be sad.
                while len(room_events) < search_filter.limit() and i < 5:
                    i += 1
                    results = yield self.store.search_room(
                        room_id, search_term, keys, search_filter.limit() * 2,
                        pagination_token=pagination_token,
                    )

                    results_map = {r["event"].event_id: r for r in results}

                    rank_map.update({r["event"].event_id: r["rank"] for r in results})

                    filtered_events = search_filter.filter([
                        r["event"] for r in results
                    ])

                    events = yield self._filter_events_for_client(
                        user.to_string(), filtered_events
                    )

                    room_events.extend(events)
                    room_events = room_events[:search_filter.limit()]

                    if len(results) < search_filter.limit() * 2:
                        pagination_token = None
                        break
                    else:
                        pagination_token = results[-1]["pagination_token"]

                if room_events:
                    res = results_map[room_events[-1].event_id]
                    pagination_token = res["pagination_token"]

                    group = room_groups.setdefault(room_id, {})
                    if pagination_token:
                        next_batch = encode_base64("%s\n%s\n%s" % (
                            "room_id", room_id, pagination_token
                        ))
                        group["next_batch"] = next_batch

                        if batch_token:
                            global_next_batch = next_batch

                    group["results"] = [e.event_id for e in room_events]
                    group["order"] = max(
                        e.origin_server_ts/1000 for e in room_events
                        if hasattr(e, "origin_server_ts")
                    )

                allowed_events.extend(room_events)

            # Normalize the group orders into [0, 1]
            if room_groups:
                if len(room_groups) > 1:
                    mx = max(g["order"] for g in room_groups.values())
                    mn = min(g["order"] for g in room_groups.values())

                    if mx == mn:
                        # All groups tie: avoid dividing by zero below.
                        for g in room_groups.values():
                            g["order"] = 1
                    else:
                        for g in room_groups.values():
                            g["order"] = (g["order"] - mn) * 1.0 / (mx - mn)
                else:
                    # next(iter(...)) rather than .values()[0] so this also
                    # works on Python 3, where dict.values() is a view.
                    next(iter(room_groups.values()))["order"] = 1

        else:
            # We should never get here due to the guard earlier.
            raise NotImplementedError()

        # If client has asked for "context" for each event (i.e. some surrounding
        # events and state), fetch that
        if event_context is not None:
            now_token = yield self.hs.get_event_sources().get_current_token()

            contexts = {}
            for event in allowed_events:
                res = yield self.store.get_events_around(
                    event.room_id, event.event_id, before_limit, after_limit
                )

                res["events_before"] = yield self._filter_events_for_client(
                    user.to_string(), res["events_before"]
                )

                res["events_after"] = yield self._filter_events_for_client(
                    user.to_string(), res["events_after"]
                )

                res["start"] = now_token.copy_and_replace(
                    "room_key", res["start"]
                ).to_string()

                res["end"] = now_token.copy_and_replace(
                    "room_key", res["end"]
                ).to_string()

                if include_profile:
                    senders = set(
                        ev.sender
                        for ev in itertools.chain(
                            res["events_before"], [event], res["events_after"]
                        )
                    )

                    if res["events_after"]:
                        last_event_id = res["events_after"][-1].event_id
                    else:
                        last_event_id = event.event_id

                    state = yield self.store.get_state_for_event(
                        last_event_id,
                        types=[(EventTypes.Member, sender) for sender in senders]
                    )

                    res["profile_info"] = {
                        s.state_key: {
                            "displayname": s.content.get("displayname", None),
                            "avatar_url": s.content.get("avatar_url", None),
                        }
                        for s in state.values()
                        if s.type == EventTypes.Member and s.state_key in senders
                    }

                contexts[event.event_id] = res
        else:
            contexts = {}

        # TODO: Add a limit

        time_now = self.clock.time_msec()

        for context in contexts.values():
            context["events_before"] = [
                serialize_event(e, time_now)
                for e in context["events_before"]
            ]
            context["events_after"] = [
                serialize_event(e, time_now)
                for e in context["events_after"]
            ]

        state_results = {}
        if include_state:
            rooms = set(e.room_id for e in allowed_events)
            for room_id in rooms:
                state = yield self.state_handler.get_current_state(room_id)
                state_results[room_id] = state.values()

            state_results.values()

        # We're now about to serialize the events. We should not make any
        # blocking calls after this. Otherwise the 'age' will be wrong
        results = {
            e.event_id: {
                "rank": rank_map[e.event_id],
                "result": serialize_event(e, time_now),
                "context": contexts.get(e.event_id, {}),
            }
            for e in allowed_events
        }

        logger.info("Found %d results", len(results))

        rooms_cat_res = {
            "results": results,
            "count": len(results)
        }

        if state_results:
            rooms_cat_res["state"] = {
                room_id: [serialize_event(e, time_now) for e in state]
                for room_id, state in state_results.items()
            }

        if room_groups and "room_id" in group_keys:
            rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups

        if sender_group and "sender" in group_keys:
            rooms_cat_res.setdefault("groups", {})["sender"] = sender_group

        if global_next_batch:
            rooms_cat_res["next_batch"] = global_next_batch

        defer.returnValue({
            "search_categories": {
                "room_events": rooms_cat_res
            }
        })
Exemple #48
0
    def process_v2_response(
        self, from_server, response_json, requested_ids=(),
    ):
        """Parse a 'Server Keys' structure from the result of a /key request

        This is used to parse either the entirety of the response from
        GET /_matrix/key/v2/server, or a single entry from the list returned by
        POST /_matrix/key/v2/query.

        Checks that each signature in the response that claims to come from the origin
        server is valid. (Does not check that there actually is such a signature, for
        some reason.)

        Stores the json in server_keys_json so that it can be used for future responses
        to /_matrix/key/v2/query.

        Args:
            from_server (str): the name of the server producing this result: either
                the origin server for a /_matrix/key/v2/server request, or the notary
                for a /_matrix/key/v2/query.

            response_json (dict): the json-decoded Server Keys response object

            requested_ids (iterable[str]): a list of the key IDs that were requested.
                We will store the json for these key ids as well as any that are
                actually in the response

        Returns:
            Deferred[dict[str, nacl.signing.VerifyKey]]:
                map from key_id to key object
        """
        time_now_ms = self.clock.time_msec()
        response_keys = {}

        # Current keys: only keep those whose signing algorithm we support.
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.time_added = time_now_ms
                verify_keys[key_id] = verify_key

        # Expired keys: same treatment, but carry their expiry timestamp.
        old_verify_keys = {}
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_key.expired = key_data["expired_ts"]
                verify_key.time_added = time_now_ms
                old_verify_keys[key_id] = verify_key

        # Check every signature the origin server claims to have made; each
        # signing key must also appear among the response's verify_keys.
        server_name = response_json["server_name"]
        for key_id in response_json["signatures"].get(server_name, {}):
            if key_id not in response_json["verify_keys"]:
                raise KeyLookupError(
                    "Key response must include verification keys for all"
                    " signatures"
                )
            if key_id in verify_keys:
                verify_signed_json(
                    response_json,
                    server_name,
                    verify_keys[key_id]
                )

        # Re-sign the response with our own key so we can serve it back out
        # as a notary response to /_matrix/key/v2/query.
        signed_key_json = sign_json(
            response_json,
            self.config.server_name,
            self.config.signing_key[0],
        )

        signed_key_json_bytes = encode_canonical_json(signed_key_json)
        ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

        # Persist the json under every requested key id plus any key id that
        # actually appeared in the response.
        updated_key_ids = set(requested_ids)
        updated_key_ids.update(verify_keys)
        updated_key_ids.update(old_verify_keys)

        response_keys.update(verify_keys)
        response_keys.update(old_verify_keys)

        yield logcontext.make_deferred_yieldable(defer.gatherResults(
            [
                run_in_background(
                    self.store.store_server_keys_json,
                    server_name=server_name,
                    key_id=key_id,
                    from_server=from_server,
                    ts_now_ms=time_now_ms,
                    ts_expires_ms=ts_valid_until_ms,
                    key_json_bytes=signed_key_json_bytes,
                )
                for key_id in updated_key_ids
            ],
            consumeErrors=True,
        ).addErrback(unwrapFirstError))

        defer.returnValue(response_keys)

from tests import unittest

from synapse.events.builder import EventBuilder
from synapse.crypto.event_signing import add_hashes_and_signatures

from unpaddedbase64 import decode_base64

import nacl.signing


# Perform these tests using given secret key so we get entirely deterministic
# signatures output that we can test against.
# 32-byte ed25519 seed, fixed so the test signatures are deterministic.
SIGNING_KEY_SEED = decode_base64(
    "YJDBA9Xnr2sVqXD9Vj7XVUnmFZcZrlw8Md7kMW+3XA1"
)

# Algorithm and version combine into the key identifier, e.g. "ed25519:1".
KEY_ALG = "ed25519"
KEY_VER = 1
KEY_NAME = "%s:%d" % (KEY_ALG, KEY_VER)

# Server name used as the signing entity in these tests.
HOSTNAME = "domain"


class EventSigningTestCase(unittest.TestCase):

    def setUp(self):
        # Build the deterministic signing key from the fixed seed, tagging it
        # with the algorithm/version expected by the signing code.
        signing_key = nacl.signing.SigningKey(SIGNING_KEY_SEED)
        signing_key.alg = KEY_ALG
        signing_key.version = KEY_VER
        self.signing_key = signing_key
Exemple #50
0
 def from_token(cls, token):
     """Deserialize a base64-encoded msgpack pagination token back into a
     RoomListNextBatch, mapping short wire keys to full attribute names."""
     decoded = msgpack.loads(decode_base64(token))
     kwargs = {
         cls.REVERSE_KEY_DICT[key]: val
         for key, val in decoded.items()
     }
     return RoomListNextBatch(**kwargs)