def response_json_object(self):
    """Build the signed key-response JSON for this server.

    Collects the current and expired signing keys plus the configured TLS
    fingerprints, then signs the whole object with every local signing key.
    """
    verify_keys = {}
    for signing_key in self.config.signing_key:
        raw_verify_key = signing_key.verify_key.encode()
        key_id = "%s:%s" % (signing_key.alg, signing_key.version)
        verify_keys[key_id] = {u"key": encode_base64(raw_verify_key)}

    old_verify_keys = {}
    for key_id, old_key in self.config.old_signing_keys.items():
        old_verify_keys[key_id] = {
            u"key": encode_base64(old_key.encode()),
            u"expired_ts": old_key.expired_ts,
        }

    tls_fingerprints = self.config.tls_fingerprints

    json_object = {
        u"valid_until_ts": self.valid_until_ts,
        u"server_name": self.config.server_name,
        u"verify_keys": verify_keys,
        u"old_verify_keys": old_verify_keys,
        u"tls_fingerprints": tls_fingerprints,
    }
    # Sign with each local signing key so clients can verify with any of them.
    for signing_key in self.config.signing_key:
        json_object = sign_json(
            json_object, self.config.server_name, signing_key,
        )
    return json_object
def compute_event_signature(event, signature_name, signing_key):
    """Sign the redacted (pruned) form of *event*.

    Returns the resulting ``signatures`` dict, suitable for merging into the
    event.
    """
    pruned = prune_event(event)
    json_to_sign = pruned.get_pdu_json()
    # These fields are not covered by the event signature.
    json_to_sign.pop("age_ts", None)
    json_to_sign.pop("unsigned", None)

    logger.debug("Signing event: %s", encode_canonical_json(json_to_sign))
    json_to_sign = sign_json(json_to_sign, signature_name, signing_key)
    logger.debug("Signed event: %s", encode_canonical_json(json_to_sign))

    return json_to_sign["signatures"]
def test_sign_minimal(self):
    """Signing an empty object yields only the expected 'signatures' entry."""
    expected = {
        'signatures': {
            'domain': {
                KEY_NAME: (
                    "K8280/U9SSy9IVtjBuVeLr+HpOB4BQFWbg+UZaADMt"
                    "TdGYI7Geitb76LTrr5QV/7Xg4ahLwYGYZzuHGZKM5ZAQ"
                ),
            },
        },
    }
    self.assertEquals(sign_json({}, "domain", self.signing_key), expected)
def create_attestation(self, group_id, user_id):
    """Create an attestation for the group_id and user_id with default
    validity length.
    """
    # Jitter the validity period so that renewals don't all land at once.
    period_ms = DEFAULT_ATTESTATION_LENGTH_MS * random.uniform(
        *DEFAULT_ATTESTATION_JITTER
    )
    valid_until_ms = int(self.clock.time_msec() + period_ms)
    return sign_json(
        {
            "group_id": group_id,
            "user_id": user_id,
            "valid_until_ms": valid_until_ms,
        },
        self.server_name,
        self.signing_key,
    )
def sign_request(self, destination, method, url_bytes, headers_dict, content=None):
    """Sign a federation request and add the Authorization header(s).

    Args:
        destination: destination homeserver of the request
        method: HTTP method of the request
        url_bytes: URI path of the request
        headers_dict: dict of request headers; the ``b"Authorization"`` entry
            is set to a list of header bytestrings
        content: optional request body, included in the signed json
    """
    request = {
        "method": method,
        "uri": url_bytes,
        "origin": self.server_name,
        "destination": destination,
    }
    if content is not None:
        request["content"] = content

    request = sign_json(request, self.server_name, self.signing_key)

    auth_headers = []
    for key, sig in request["signatures"][self.server_name].items():
        # BUG FIX: bytes(<str>) raises TypeError on Python 3 when no encoding
        # is given; encode explicitly (matching the other sign_request
        # implementations in this file).
        auth_headers.append(
            (
                'X-Matrix origin=%s,key="%s",sig="%s"'
                % (self.server_name, key, sig)
            ).encode("ascii")
        )
    headers_dict[b"Authorization"] = auth_headers
def test_sign_with_data(self):
    """Signing preserves existing fields and adds a 'signatures' entry."""
    expected = {
        'one': 1,
        'two': "Two",
        'signatures': {
            'domain': {
                KEY_NAME: (
                    "KqmLSbO39/Bzb0QIYE82zqLwsA+PDzYIpIRA2sRQ4s"
                    "L53+sN6/fpNSoqE7BP7vBZhG6kYdD13EIMJpvhJI+6Bw"
                ),
            },
        },
    }
    self.assertEquals(
        sign_json({'one': 1, 'two': "Two"}, "domain", self.signing_key),
        expected,
    )
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
    """Searches for users in directory

    Returns:
        dict of the form::

            {
                "limited": <bool>,  # whether there were more results or not
                "results": [  # Ordered by best match first
                    {
                        "user_id": <user_id>,
                        "display_name": <display_name>,
                        "avatar_url": <avatar_url>
                    }
                ]
            }
    """
    requester = await self.auth.get_user_by_req(request, allow_guest=False)
    user_id = requester.user.to_string()

    if not self.hs.config.userdirectory.user_directory_search_enabled:
        # Search is disabled: report an empty, non-limited result set.
        return 200, {"limited": False, "results": []}

    body = parse_json_object_from_request(request)

    if self.hs.config.userdirectory.user_directory_defer_to_id_server:
        # Proxy the request (signed with our key) to the identity server.
        signed_body = sign_json(
            body, self.hs.hostname, self.hs.config.key.signing_key[0]
        )
        url = "%s/_matrix/identity/api/v1/user_directory/search" % (
            self.hs.config.userdirectory.user_directory_defer_to_id_server,
        )
        resp = await self.http_client.post_json_get_json(url, signed_body)
        return 200, resp

    # Cap the requested limit at 50 results.
    limit = min(body.get("limit", 10), 50)

    try:
        search_term = body["search_term"]
    except Exception:
        raise SynapseError(400, "`search_term` is required field")

    results = await self.user_directory_handler.search_users(
        user_id, search_term, limit
    )
    return 200, results
def sign_request(self, destination, method, url_bytes, headers_dict,
                 content=None, destination_is=None):
    """
    Signs a request by adding an Authorization header to headers_dict
    Args:
        destination (bytes|None): The desination home server of the request.
            May be None if the destination is an identity server, in which
            case destination_is must be non-None.
        method (bytes): The HTTP method of the request
        url_bytes (bytes): The URI path of the request
        headers_dict (dict[bytes, list[bytes]]): Dictionary of request headers
            to append to
        content (object): The body of the request
        destination_is (bytes): As 'destination', but if the destination is an
            identity server
    Returns:
        None
    """
    request = {
        "method": method,
        "uri": url_bytes,
        "origin": self.server_name,
    }
    # Only include the destination fields that were actually supplied.
    if destination is not None:
        request["destination"] = destination
    if destination_is is not None:
        request["destination_is"] = destination_is
    if content is not None:
        request["content"] = content

    request = sign_json(request, self.server_name, self.signing_key)

    auth_headers = [
        (
            'X-Matrix origin=%s,key="%s",sig="%s"'
            % (self.server_name, key, sig)
        ).encode("ascii")
        for key, sig in request["signatures"][self.server_name].items()
    ]
    headers_dict[b"Authorization"] = auth_headers
def build_auth_headers(
    self,
    destination: Optional[bytes],
    method: bytes,
    url_bytes: bytes,
    content: Optional[JsonDict] = None,
    destination_is: Optional[bytes] = None,
) -> List[bytes]:
    """
    Builds the Authorization headers for a federation request
    Args:
        destination: The destination homeserver of the request.
            May be None if the destination is an identity server, in which
            case destination_is must be non-None.
        method: The HTTP method of the request
        url_bytes: The URI path of the request
        content: The body of the request
        destination_is: As 'destination', but if the destination is an
            identity server
    Returns:
        A list of headers to be added as "Authorization:" headers
    """
    request = {
        "method": method.decode("ascii"),
        "uri": url_bytes.decode("ascii"),
        "origin": self.server_name,
    }
    # Only one of destination / destination_is is expected to be set.
    if destination is not None:
        request["destination"] = destination.decode("ascii")
    if destination_is is not None:
        request["destination_is"] = destination_is.decode("ascii")
    if content is not None:
        request["content"] = content

    request = sign_json(request, self.server_name, self.signing_key)

    auth_headers = []
    for key, sig in request["signatures"][self.server_name].items():
        header = 'X-Matrix origin=%s,key="%s",sig="%s"' % (
            self.server_name,
            key,
            sig,
        )
        auth_headers.append(header.encode("ascii"))
    return auth_headers
def create_attestation(self, group_id, user_id):
    """Create an attestation for the group_id and user_id with default
    validity length.
    """
    validity_period = DEFAULT_ATTESTATION_LENGTH_MS
    # Randomise the lifetime slightly so renewals are spread out over time.
    validity_period *= random.uniform(*DEFAULT_ATTESTATION_JITTER)
    valid_until_ms = int(self.clock.time_msec() + validity_period)

    attestation = {
        "group_id": group_id,
        "user_id": user_id,
        "valid_until_ms": valid_until_ms,
    }
    return sign_json(attestation, self.server_name, self.signing_key)
def build_auth_headers(
    self,
    destination,
    method,
    url_bytes,
    content=None,
    destination_is=None,
):
    """
    Builds the Authorization headers for a federation request
    Args:
        destination (bytes|None): The desination home server of the request.
            May be None if the destination is an identity server, in which
            case destination_is must be non-None.
        method (bytes): The HTTP method of the request
        url_bytes (bytes): The URI path of the request
        content (object): The body of the request
        destination_is (bytes): As 'destination', but if the destination is an
            identity server
    Returns:
        list[bytes]: a list of headers to be added as "Authorization:" headers
    """
    request = {
        "method": method,
        "uri": url_bytes,
        "origin": self.server_name,
    }
    # Include only whichever destination field was supplied.
    if destination is not None:
        request["destination"] = destination
    if destination_is is not None:
        request["destination_is"] = destination_is
    if content is not None:
        request["content"] = content

    request = sign_json(request, self.server_name, self.signing_key)

    auth_headers = []
    for key, sig in request["signatures"][self.server_name].items():
        header = 'X-Matrix origin={},key="{}",sig="{}"'.format(
            self.server_name, key, sig
        )
        auth_headers.append(header.encode('ascii'))
    return auth_headers
def main():
    """Convert stored v1 server keys to signed v2 key json.

    Reads a synapse-style YAML config (path in argv[1]), signs a v2 key
    object for every server that has v1 keys but no v2 json, dumps the
    result to stdout and inserts it into the server_keys_json table.
    """
    with open(sys.argv[1]) as config_file:
        # BUG FIX: yaml.load without an explicit Loader executes arbitrary
        # YAML tags; use safe_load for config files.
        config = yaml.safe_load(config_file)

    # Milliseconds timestamp, rounded down to the start of the current day.
    valid_until = int(time.time() / (3600 * 24)) * 1000 * 3600 * 24

    server_name = config["server_name"]
    with open(config["signing_key_path"]) as key_file:
        signing_key = read_signing_keys(key_file)[0]

    database = config["database"]
    assert database["name"] == "psycopg2", "Can only convert for postgresql"
    args = database["args"]
    # Twisted connection-pool settings are not valid psycopg2 kwargs.
    args.pop("cp_max")
    args.pop("cp_min")
    connection = psycopg2.connect(**args)

    keys = select_v1_keys(connection)
    certificates = select_v1_certs(connection)
    json = select_v2_json(connection)

    result = {}
    for server in keys:
        if server not in json:
            v2_json = convert_v1_to_v2(
                server, valid_until, keys[server], certificates[server]
            )
            v2_json = sign_json(v2_json, server_name, signing_key)
            result[server] = v2_json

    yaml.safe_dump(result, sys.stdout, default_flow_style=False)

    rows = [
        row
        for server, json in result.items()
        for row in rows_v2(server, json)
    ]

    cursor = connection.cursor()
    cursor.executemany(
        "INSERT INTO server_keys_json ("
        " server_name, key_id, from_server,"
        " ts_added_ms, ts_valid_until_ms, key_json"
        ") VALUES (%s, %s, %s, %s, %s, %s)",
        rows,
    )
    connection.commit()
def compute_event_signature(event_dict, signature_name, signing_key):
    """Compute the signature of the event for the given name and key.

    Args:
        event_dict (dict): The event as a dict
        signature_name (str): The name of the entity signing the event
            (typically the server's hostname).
        signing_key (syutil.crypto.SigningKey): The key to sign with

    Returns:
        dict[str, dict[str, str]]: Returns a dictionary in the same format of
            an event's signatures field.
    """
    signable = prune_event_dict(event_dict)
    # Neither of these fields is covered by the signature.
    signable.pop("age_ts", None)
    signable.pop("unsigned", None)

    logger.debug("Signing event: %s", encode_canonical_json(signable))
    signable = sign_json(signable, signature_name, signing_key)
    logger.debug("Signed event: %s", encode_canonical_json(signable))

    return signable["signatures"]
def response_json_object(self):
    """Build the signed key-response JSON, including a SHA-256 fingerprint
    of this server's TLS certificate.
    """
    verify_keys = {}
    for signing_key in self.config.signing_key:
        key_id = "%s:%s" % (signing_key.alg, signing_key.version)
        verify_keys[key_id] = {
            u"key": encode_base64(signing_key.verify_key.encode())
        }

    old_verify_keys = {}
    for old_key in self.config.old_signing_keys:
        key_id = "%s:%s" % (old_key.alg, old_key.version)
        old_verify_keys[key_id] = {
            u"key": encode_base64(old_key.encode()),
            u"expired_ts": old_key.expired,
        }

    # Fingerprint the DER-encoded TLS certificate with SHA-256.
    x509_certificate_bytes = crypto.dump_certificate(
        crypto.FILETYPE_ASN1, self.config.tls_certificate
    )
    sha256_fingerprint = sha256(x509_certificate_bytes).digest()

    json_object = {
        u"valid_until_ts": self.valid_until_ts,
        u"server_name": self.config.server_name,
        u"verify_keys": verify_keys,
        u"old_verify_keys": old_verify_keys,
        u"tls_fingerprints": [
            {u"sha256": encode_base64(sha256_fingerprint)},
        ],
    }
    # Sign the object with each of our signing keys in turn.
    for signing_key in self.config.signing_key:
        json_object = sign_json(
            json_object, self.config.server_name, signing_key,
        )
    return json_object
def main():
    """Sign and store v2 key json for every server that only has v1 keys."""
    config = yaml.safe_load(open(sys.argv[1]))
    # Milliseconds timestamp, rounded down to the start of the current day.
    valid_until = int(time.time() / (3600 * 24)) * 1000 * 3600 * 24

    server_name = config["server_name"]
    signing_key = read_signing_keys(open(config["signing_key_path"]))[0]

    database = config["database"]
    assert database["name"] == "psycopg2", "Can only convert for postgresql"
    args = database["args"]
    # Drop twisted connection-pool options; psycopg2 does not accept them.
    args.pop("cp_max")
    args.pop("cp_min")
    connection = psycopg2.connect(**args)

    keys = select_v1_keys(connection)
    certificates = select_v1_certs(connection)
    json = select_v2_json(connection)

    result = {}
    for server in keys:
        if server not in json:
            v2_json = convert_v1_to_v2(
                server, valid_until, keys[server], certificates[server]
            )
            v2_json = sign_json(v2_json, server_name, signing_key)
            result[server] = v2_json

    yaml.safe_dump(result, sys.stdout, default_flow_style=False)

    rows = list(
        row
        for server, json in result.items()
        for row in rows_v2(server, json)
    )

    cursor = connection.cursor()
    cursor.executemany(
        "INSERT INTO server_keys_json ("
        " server_name, key_id, from_server,"
        " ts_added_ms, ts_valid_until_ms, key_json"
        ") VALUES (%s, %s, %s, %s, %s, %s)",
        rows,
    )
    connection.commit()
def response_json_object(server_config):
    """Build and sign the key/certificate response for *server_config*."""
    verify_keys = {}
    for signing_key in server_config.signing_key:
        key_id = "%s:%s" % (signing_key.alg, signing_key.version)
        verify_keys[key_id] = encode_base64(signing_key.verify_key.encode())

    # DER-encode the TLS certificate so it can be base64'd into the response.
    x509_certificate_bytes = crypto.dump_certificate(
        crypto.FILETYPE_ASN1, server_config.tls_certificate
    )
    json_object = {
        u"server_name": server_config.server_name,
        u"verify_keys": verify_keys,
        u"tls_certificate": encode_base64(x509_certificate_bytes),
    }
    # Sign with each of the server's signing keys.
    for signing_key in server_config.signing_key:
        json_object = sign_json(
            json_object, server_config.server_name, signing_key,
        )
    return json_object
def sign_request(self, destination, method, url_bytes, headers_dict, content=None):
    """Sign a federation request and attach the Authorization header.

    Args:
        destination: destination homeserver of the request
        method: HTTP method of the request
        url_bytes: URI path of the request
        headers_dict: dict of request headers; the ``b"Authorization"`` entry
            is set to a list of header bytestrings
        content: optional request body, included in the signed json
    """
    request = {
        "method": method,
        "uri": url_bytes,
        "origin": self.server_name,
        "destination": destination,
    }
    if content is not None:
        request["content"] = content

    request = sign_json(request, self.server_name, self.signing_key)

    auth_headers = []
    for key, sig in request["signatures"][self.server_name].items():
        # BUG FIX: bytes(<str>) raises TypeError on Python 3 without an
        # encoding argument; encode the header value explicitly.
        auth_headers.append(
            (
                "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
                    self.server_name, key, sig,
                )
            ).encode("ascii")
        )
    headers_dict[b"Authorization"] = auth_headers
def response_json_object(server_config):
    """Return the server's key/certificate object, signed with every
    configured signing key.
    """
    verify_keys = {
        "%s:%s" % (k.alg, k.version): encode_base64(k.verify_key.encode())
        for k in server_config.signing_key
    }

    # The TLS certificate is shipped DER-encoded, base64'd.
    x509_certificate_bytes = crypto.dump_certificate(
        crypto.FILETYPE_ASN1, server_config.tls_certificate
    )
    json_object = {
        u"server_name": server_config.server_name,
        u"verify_keys": verify_keys,
        u"tls_certificate": encode_base64(x509_certificate_bytes),
    }
    for k in server_config.signing_key:
        json_object = sign_json(json_object, server_config.server_name, k)
    return json_object
def response_json_object(self):
    """Build the signed key response, omitting TLS fingerprints when TLS
    is disabled.
    """
    verify_keys = {}
    for signing_key in self.config.signing_key:
        key_id = "%s:%s" % (signing_key.alg, signing_key.version)
        verify_keys[key_id] = {
            u"key": encode_base64(signing_key.verify_key.encode())
        }

    old_verify_keys = {}
    for key_id, old_key in self.config.old_signing_keys.items():
        old_verify_keys[key_id] = {
            u"key": encode_base64(old_key.encode()),
            u"expired_ts": old_key.expired_ts,
        }

    # Without TLS there is no certificate to fingerprint.
    tls_fingerprints = (
        [] if self.config.no_tls else self.config.tls_fingerprints
    )

    json_object = {
        u"valid_until_ts": self.valid_until_ts,
        u"server_name": self.config.server_name,
        u"verify_keys": verify_keys,
        u"old_verify_keys": old_verify_keys,
        u"tls_fingerprints": tls_fingerprints,
    }
    for signing_key in self.config.signing_key:
        json_object = sign_json(
            json_object, self.config.server_name, signing_key,
        )
    return json_object
def response_json_object(self):
    """Build this server's signed key response (no TLS fingerprints)."""
    verify_keys = {}
    for signing_key in self.config.signing_key:
        key_id = "%s:%s" % (signing_key.alg, signing_key.version)
        verify_keys[key_id] = {
            "key": encode_base64(signing_key.verify_key.encode())
        }

    old_verify_keys = {}
    for key_id, old_key in self.config.old_signing_keys.items():
        old_verify_keys[key_id] = {
            "key": encode_base64(old_key.encode()),
            "expired_ts": old_key.expired_ts,
        }

    json_object = {
        "valid_until_ts": self.valid_until_ts,
        "server_name": self.config.server_name,
        "verify_keys": verify_keys,
        "old_verify_keys": old_verify_keys,
    }
    # Sign with every local signing key.
    for signing_key in self.config.signing_key:
        json_object = sign_json(json_object, self.config.server_name, signing_key)
    return json_object
def process_v2_response(self, from_server, response_json, requested_ids=()):
    """Parse a 'Server Keys' structure from the result of a /key request

    This is used to parse either the entirety of the response from
    GET /_matrix/key/v2/server, or a single entry from the list returned by
    POST /_matrix/key/v2/query.

    Checks that each signature in the response that claims to come from the
    origin server is valid. (Does not check that there actually is such a
    signature, for some reason.)

    Stores the json in server_keys_json so that it can be used for future
    responses to /_matrix/key/v2/query.

    Args:
        from_server (str): the name of the server producing this result:
            either the origin server for a /_matrix/key/v2/server request, or
            the notary for a /_matrix/key/v2/query.
        response_json (dict): the json-decoded Server Keys response object
        requested_ids (iterable[str]): a list of the key IDs that were
            requested. We will store the json for these key ids as well as
            any that are actually in the response

    Returns:
        Deferred[dict[str, nacl.signing.VerifyKey]]: map from key_id to key
            object
    """
    # BUG FIX: the default for requested_ids was a mutable list ([]); use an
    # immutable tuple instead. Callers may pass any iterable as before.
    time_now_ms = self.clock.time_msec()
    response_keys = {}

    verify_keys = {}
    for key_id, key_data in response_json["verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.time_added = time_now_ms
            verify_keys[key_id] = verify_key

    old_verify_keys = {}
    for key_id, key_data in response_json["old_verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.expired = key_data["expired_ts"]
            verify_key.time_added = time_now_ms
            old_verify_keys[key_id] = verify_key

    server_name = response_json["server_name"]
    for key_id in response_json["signatures"].get(server_name, {}):
        if key_id not in response_json["verify_keys"]:
            raise KeyLookupError(
                "Key response must include verification keys for all"
                " signatures"
            )
        if key_id in verify_keys:
            verify_signed_json(
                response_json, server_name, verify_keys[key_id]
            )

    # Re-sign the response with our own key so it is ready to hand out if we
    # are asked to act as a notary server.
    signed_key_json = sign_json(
        response_json,
        self.config.server_name,
        self.config.signing_key[0],
    )
    signed_key_json_bytes = encode_canonical_json(signed_key_json)
    ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

    # Store the json under every key id that was requested, plus every key id
    # actually present in the response.
    updated_key_ids = set(requested_ids)
    updated_key_ids.update(verify_keys)
    updated_key_ids.update(old_verify_keys)

    response_keys.update(verify_keys)
    response_keys.update(old_verify_keys)

    yield logcontext.make_deferred_yieldable(defer.gatherResults(
        [
            run_in_background(
                self.store.store_server_keys_json,
                server_name=server_name,
                key_id=key_id,
                from_server=from_server,
                ts_now_ms=time_now_ms,
                ts_expires_ms=ts_valid_until_ms,
                key_json_bytes=signed_key_json_bytes,
            )
            for key_id in updated_key_ids
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError))

    defer.returnValue(response_keys)
def setUp(self):
    """Sign a fixture message with a mock key for the verification tests."""
    self.message = {'foo': 'bar', 'unsigned': {}}
    self.sigkey = MockSigningKey()
    # Sanity-check the mock key before using it to sign.
    self.assertEqual(self.sigkey.alg, 'mock')
    self.signed = sign_json(self.message, 'Alice', self.sigkey)
    self.verkey = MockVerifyKey()
def main() -> None:
    """Command-line entry point: read JSON, sign it, write it out."""
    parser = argparse.ArgumentParser(
        description="""Adds a signature to a JSON object.

Example usage:

    $ scripts-dev/sign_json.py -N test -k localhost.signing.key "{}"
    {"signatures":{"test":{"ed25519:a_ZnZh":"LmPnml6iM0iR..."}}}
""",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    parser.add_argument(
        "-N",
        "--server-name",
        help="Name to give as the local homeserver. If unspecified, will be "
        "read from the config file.",
    )
    parser.add_argument(
        "-k",
        "--signing-key-path",
        help="Path to the file containing the private ed25519 key to sign the "
        "request with.",
    )
    parser.add_argument(
        "-K",
        "--signing-key",
        help="The private ed25519 key to sign the request with.",
    )
    parser.add_argument(
        "-c",
        "--config",
        default="homeserver.yaml",
        help=(
            "Path to synapse config file, from which the server name and/or signing "
            "key path will be read. Ignored if --server-name and --signing-key(-path) "
            "are both given."
        ),
    )
    parser.add_argument(
        "--sign-event-room-version",
        type=str,
        help=(
            "Sign the JSON as an event for the given room version, rather than raw JSON. "
            "This means that we will add a 'hashes' object, and redact the event before "
            "signing."
        ),
    )

    input_args = parser.add_mutually_exclusive_group()
    input_args.add_argument("input_data", nargs="?", help="Raw JSON to be signed.")
    input_args.add_argument(
        "-i",
        "--input",
        type=argparse.FileType("r"),
        default=sys.stdin,
        help=(
            "A file from which to read the JSON to be signed. If neither --input nor "
            "input_data are given, JSON will be read from stdin."
        ),
    )
    parser.add_argument(
        "-o",
        "--output",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="Where to write the signed JSON. Defaults to stdout.",
    )

    args = parser.parse_args()

    # Fall back to the config file for anything not given on the command line.
    if not args.server_name or not (args.signing_key_path or args.signing_key):
        read_args_from_config(args)

    if args.signing_key:
        keys = read_signing_keys([args.signing_key])
    else:
        with open(args.signing_key_path) as f:
            keys = read_signing_keys(f)

    json_to_sign = args.input_data
    if json_to_sign is None:
        json_to_sign = args.input.read()

    try:
        obj = json.loads(json_to_sign)
    except JSONDecodeError as e:
        print("Unable to parse input as JSON: %s" % e, file=sys.stderr)
        sys.exit(1)

    if not isinstance(obj, dict):
        print("Input json was not an object", file=sys.stderr)
        sys.exit(1)

    if args.sign_event_room_version:
        room_version = KNOWN_ROOM_VERSIONS.get(args.sign_event_room_version)
        if not room_version:
            print(
                f"Unknown room version {args.sign_event_room_version}",
                file=sys.stderr,
            )
            sys.exit(1)
        # Redact, hash and sign the object as an event of this room version.
        add_hashes_and_signatures(room_version, obj, args.server_name, keys[0])
    else:
        sign_json(obj, args.server_name, keys[0])

    for c in json_encoder.iterencode(obj):
        args.output.write(c)
    args.output.write("\n")
def test_reupload_signatures(self) -> None:
    """re-uploading a signature should not fail"""
    local_user = "******" + self.hs.hostname
    keys1 = {
        "master_key": {
            # private key: HvQBbU+hc2Zr+JP1sE0XwBe1pfZZEYtJNPJLZJtS+F8
            "user_id": local_user,
            "usage": ["master"],
            "keys": {
                "ed25519:EmkqvokUn8p+vQAGZitOk4PWjp7Ukp3txV2TbMPEiBQ": "EmkqvokUn8p+vQAGZitOk4PWjp7Ukp3txV2TbMPEiBQ"
            },
        },
        "self_signing_key": {
            # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
            "user_id": local_user,
            "usage": ["self_signing"],
            "keys": {
                "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
            },
        },
    }
    master_signing_key = key.decode_signing_key_base64(
        "ed25519",
        "EmkqvokUn8p+vQAGZitOk4PWjp7Ukp3txV2TbMPEiBQ",
        "HvQBbU+hc2Zr+JP1sE0XwBe1pfZZEYtJNPJLZJtS+F8",
    )
    sign.sign_json(keys1["self_signing_key"], local_user, master_signing_key)
    signing_key = key.decode_signing_key_base64(
        "ed25519",
        "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk",
        "2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0",
    )
    self.get_success(
        self.handler.upload_signing_keys_for_user(local_user, keys1)
    )

    # upload two device keys, which will be signed later by the self-signing key
    device_key_1 = {
        "user_id": local_user,
        "device_id": "abc",
        "algorithms": [
            "m.olm.curve25519-aes-sha2",
            RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
        ],
        "keys": {
            "ed25519:abc": "base64+ed25519+key",
            "curve25519:abc": "base64+curve25519+key",
        },
        "signatures": {local_user: {"ed25519:abc": "base64+signature"}},
    }
    device_key_2 = {
        "user_id": local_user,
        "device_id": "def",
        "algorithms": [
            "m.olm.curve25519-aes-sha2",
            RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
        ],
        "keys": {
            "ed25519:def": "base64+ed25519+key",
            "curve25519:def": "base64+curve25519+key",
        },
        "signatures": {local_user: {"ed25519:def": "base64+signature"}},
    }

    self.get_success(
        self.handler.upload_keys_for_user(
            local_user, "abc", {"device_keys": device_key_1}
        )
    )
    self.get_success(
        self.handler.upload_keys_for_user(
            local_user, "def", {"device_keys": device_key_2}
        )
    )

    # sign the first device key and upload it
    del device_key_1["signatures"]
    sign.sign_json(device_key_1, local_user, signing_key)
    self.get_success(
        self.handler.upload_signatures_for_device_keys(
            local_user, {local_user: {"abc": device_key_1}}
        )
    )

    # sign the second device key and upload both device keys. The server
    # should ignore the first device key since it already has a valid
    # signature for it
    del device_key_2["signatures"]
    sign.sign_json(device_key_2, local_user, signing_key)
    self.get_success(
        self.handler.upload_signatures_for_device_keys(
            local_user,
            {local_user: {"abc": device_key_1, "def": device_key_2}},
        )
    )

    device_key_1["signatures"][local_user]["ed25519:abc"] = "base64+signature"
    device_key_2["signatures"][local_user]["ed25519:def"] = "base64+signature"
    devices = self.get_success(
        self.handler.query_devices(
            {"device_keys": {local_user: []}}, 0, local_user, "device123"
        )
    )
    del devices["device_keys"][local_user]["abc"]["unsigned"]
    del devices["device_keys"][local_user]["def"]["unsigned"]
    self.assertDictEqual(
        devices["device_keys"][local_user]["abc"], device_key_1
    )
    self.assertDictEqual(
        devices["device_keys"][local_user]["def"], device_key_2
    )
def process_v2_response(self, from_server, response_json, time_added_ms):
    """Parse a 'Server Keys' structure from the result of a /key request

    This is used to parse either the entirety of the response from
    GET /_matrix/key/v2/server, or a single entry from the list returned by
    POST /_matrix/key/v2/query.

    Checks that each signature in the response that claims to come from the
    origin server is valid, and that there is at least one such signature.

    Stores the json in server_keys_json so that it can be used for future
    responses to /_matrix/key/v2/query.

    Args:
        from_server (str): the name of the server producing this result:
            either the origin server for a /_matrix/key/v2/server request, or
            the notary for a /_matrix/key/v2/query.
        response_json (dict): the json-decoded Server Keys response object
        time_added_ms (int): the timestamp to record in server_keys_json

    Returns:
        Deferred[dict[str, FetchKeyResult]]: map from key_id to result object
    """
    ts_valid_until_ms = response_json["valid_until_ts"]

    # start by extracting the keys from the response, since they may be
    # required to validate the signature on the response.
    verify_keys = {}
    for key_id, key_data in response_json["verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_keys[key_id] = FetchKeyResult(
                verify_key=verify_key, valid_until_ts=ts_valid_until_ms
            )

    server_name = response_json["server_name"]
    verified = False
    for key_id in response_json["signatures"].get(server_name, {}):
        # each of the keys used for the signature must be present in the
        # response json.
        key = verify_keys.get(key_id)
        if not key:
            raise KeyLookupError(
                "Key response is signed by key id %s:%s but that key is not "
                "present in the response" % (server_name, key_id)
            )

        verify_signed_json(response_json, server_name, key.verify_key)
        verified = True

    if not verified:
        raise KeyLookupError(
            "Key response for %s is not signed by the origin server"
            % (server_name,)
        )

    for key_id, key_data in response_json["old_verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_keys[key_id] = FetchKeyResult(
                verify_key=verify_key, valid_until_ts=key_data["expired_ts"]
            )

    # re-sign the json with our own key, so that it is ready if we are asked
    # to give it out as a notary server
    signed_key_json = sign_json(
        response_json, self.config.server_name, self.config.signing_key[0]
    )
    signed_key_json_bytes = encode_canonical_json(signed_key_json)

    yield make_deferred_yieldable(
        defer.gatherResults(
            [
                run_in_background(
                    self.store.store_server_keys_json,
                    server_name=server_name,
                    key_id=key_id,
                    from_server=from_server,
                    ts_now_ms=time_added_ms,
                    ts_expires_ms=ts_valid_until_ms,
                    key_json_bytes=signed_key_json_bytes,
                )
                for key_id in verify_keys
            ],
            consumeErrors=True,
        ).addErrback(unwrapFirstError)
    )

    defer.returnValue(verify_keys)
def query_keys(self, request, query, query_remote_on_cache_miss=False):
    """Answer a key query from the local cache, optionally refetching
    missing/stale entries from the origin servers before responding.
    """
    logger.info("Handling query for keys %r", query)

    store_queries = []
    for server_name, key_ids in query.items():
        if (
            self.federation_domain_whitelist is not None
            and server_name not in self.federation_domain_whitelist
        ):
            logger.debug("Federation denied with %s", server_name)
            continue

        if not key_ids:
            key_ids = (None,)
        for key_id in key_ids:
            store_queries.append((server_name, key_id, None))

    cached = yield self.store.get_server_keys_json(store_queries)

    json_results = set()

    time_now_ms = self.clock.time_msec()

    # Map from server_name to the set of key ids we need to refetch.
    cache_misses = dict()
    for (server_name, key_id, from_server), results in cached.items():
        results = [(result["ts_added_ms"], result) for result in results]

        if not results and key_id is not None:
            cache_misses.setdefault(server_name, set()).add(key_id)
            continue

        if key_id is not None:
            ts_added_ms, most_recent_result = max(results)
            ts_valid_until_ms = most_recent_result["ts_valid_until_ms"]
            req_key = query.get(server_name, {}).get(key_id, {})
            req_valid_until = req_key.get("minimum_valid_until_ts")
            miss = False
            if req_valid_until is not None:
                if ts_valid_until_ms < req_valid_until:
                    logger.debug(
                        "Cached response for %r/%r is older than requested"
                        ": valid_until (%r) < minimum_valid_until (%r)",
                        server_name,
                        key_id,
                        ts_valid_until_ms,
                        req_valid_until,
                    )
                    miss = True
                else:
                    logger.debug(
                        "Cached response for %r/%r is newer than requested"
                        ": valid_until (%r) >= minimum_valid_until (%r)",
                        server_name,
                        key_id,
                        ts_valid_until_ms,
                        req_valid_until,
                    )
            elif (ts_added_ms + ts_valid_until_ms) / 2 < time_now_ms:
                logger.debug(
                    "Cached response for %r/%r is too old"
                    ": (added (%r) + valid_until (%r)) / 2 < now (%r)",
                    server_name,
                    key_id,
                    ts_added_ms,
                    ts_valid_until_ms,
                    time_now_ms,
                )
                # We more than half way through the lifetime of the
                # response. We should fetch a fresh copy.
                miss = True
            else:
                logger.debug(
                    "Cached response for %r/%r is still valid"
                    ": (added (%r) + valid_until (%r)) / 2 < now (%r)",
                    server_name,
                    key_id,
                    ts_added_ms,
                    ts_valid_until_ms,
                    time_now_ms,
                )

            if miss:
                cache_misses.setdefault(server_name, set()).add(key_id)
            json_results.add(bytes(most_recent_result["key_json"]))
        else:
            for ts_added, result in results:
                json_results.add(bytes(result["key_json"]))

    if cache_misses and query_remote_on_cache_miss:
        # Fetch the missing keys, then re-run the query against the
        # (now updated) cache, without recursing a second time.
        yield self.fetcher.get_keys(cache_misses)
        yield self.query_keys(request, query, query_remote_on_cache_miss=False)
    else:
        signed_keys = []
        for key_json in json_results:
            key_json = json.loads(key_json)
            # Re-sign each cached response with our own key(s), since we are
            # acting as a notary.
            for signing_key in self.config.key_server_signing_keys:
                key_json = sign_json(
                    key_json, self.config.server_name, signing_key
                )
            signed_keys.append(key_json)

        results = {"server_keys": signed_keys}

        respond_with_json_bytes(request, 200, encode_canonical_json(results))
import ecdsa sk256 = ecdsa.SigningKey.generate(curve=ecdsa.NIST256p) vk = sk256.get_verifying_key() header = { 'alg': 'ES256' } payload = { 'claim': 'JSON is the raddest.', 'iss': 'brianb' } sig = jws.sign(header, payload, sk256) print(jws.verify(header, payload, sig, vk)) ''' #------ from signedjson.key import generate_signing_key, get_verify_key, encode_signing_key_base64, decode_signing_key_base64 from signedjson.sign import (sign_json, verify_signed_json, SignatureVerifyException) signing_key = generate_signing_key('zxcvb') signed_json = sign_json({'my_key': 'my_data'}, 'CDF', signing_key) print(signed_json) #Going into function verify_key = get_verify_key(signing_key) print("Going into function", verify_key) enc = encode_signing_key_base64(verify_key) print("Base64 encoded:", enc) print("This will go into function") print(decode_signing_key_base64("ed25519", "zxcvb", enc)) try: verify_signed_json(signed_json, 'CDF', verify_key) print('Signature is valid')
y = arr else: return sum(arr) print(summer_69([1, 3, 5])) print(summer_69([4, 5, 6, 7, 8, 9])) print(summer_69([2, 1, 6, 9, 11])) print(summer_69([])) ''' print("\n") from signedjson.key import generate_signing_key, get_verify_key, encode_signing_key_base64 from signedjson.sign import ( sign_json, verify_signed_json, SignatureVerifyException ) signing_key = generate_signing_key('zxcvb') base_64_signing_key = encode_signing_key_base64(signing_key) print(base_64_signing_key) signed_json = sign_json({'my_key': 'my_data'},'Drew', signing_key) print(signed_json) verify_key = get_verify_key(signing_key) base_64_verify_key = encode_signing_key_base64(verify_key) print(base_64_verify_key) #check = get_verify_key(base_64_signing_key) try: verify_signed_json(signed_json, 'Drew', verify_key) print('Signature is valid') except SignatureVerifyException: print('Signature is invalid')
def test_upload_signatures(self):
    """Uploading signatures on some devices should produce updates for that
    user.

    Walks through the full flow: register a user with two devices, upload
    per-device signing keys, upload cross-signing (master + self-signing)
    keys, sign both devices with the self-signing key, and check that each
    step emits the expected federation EDUs.
    """
    e2e_handler = self.hs.get_e2e_keys_handler()

    # register two devices
    u1 = self.register_user("user", "pass")
    self.login(u1, "pass", device_id="D1")
    self.login(u1, "pass", device_id="D2")

    # expect two edus, one per device login
    self.assertEqual(len(self.edus), 2)
    stream_id = None
    stream_id = self.check_device_update_edu(self.edus.pop(0), u1, "D1", stream_id)
    stream_id = self.check_device_update_edu(self.edus.pop(0), u1, "D2", stream_id)

    # upload signing keys for each device
    device1_signing_key = self.generate_and_upload_device_signing_key(
        u1, "D1")
    device2_signing_key = self.generate_and_upload_device_signing_key(
        u1, "D2")

    # expect two more edus (device keys changed, so the device lists update)
    self.assertEqual(len(self.edus), 2)
    stream_id = self.check_device_update_edu(self.edus.pop(0), u1, "D1", stream_id)
    stream_id = self.check_device_update_edu(self.edus.pop(0), u1, "D2", stream_id)

    # upload master key and self-signing key
    master_signing_key = generate_self_id_key()
    master_key = {
        "user_id": u1,
        "usage": ["master"],
        "keys": {
            key_id(master_signing_key): encode_pubkey(master_signing_key)
        },
    }

    # private key: HvQBbU+hc2Zr+JP1sE0XwBe1pfZZEYtJNPJLZJtS+F8
    selfsigning_signing_key = generate_self_id_key()
    selfsigning_key = {
        "user_id": u1,
        "usage": ["self_signing"],
        "keys": {
            key_id(selfsigning_signing_key): encode_pubkey(selfsigning_signing_key)
        },
    }
    # the self-signing key must itself be signed by the master key
    sign.sign_json(selfsigning_key, u1, master_signing_key)

    cross_signing_keys = {
        "master_key": master_key,
        "self_signing_key": selfsigning_key,
    }

    self.get_success(
        e2e_handler.upload_signing_keys_for_user(u1, cross_signing_keys))

    # expect signing key update edu
    self.assertEqual(len(self.edus), 1)
    self.assertEqual(
        self.edus.pop(0)["edu_type"], "org.matrix.signing_key_update")

    # sign the devices with the self-signing key
    d1_json = build_device_dict(u1, "D1", device1_signing_key)
    sign.sign_json(d1_json, u1, selfsigning_signing_key)
    d2_json = build_device_dict(u1, "D2", device2_signing_key)
    sign.sign_json(d2_json, u1, selfsigning_signing_key)

    ret = self.get_success(
        e2e_handler.upload_signatures_for_device_keys(
            u1,
            {u1: {
                "D1": d1_json,
                "D2": d2_json
            }},
        ))
    self.assertEqual(ret["failures"], {})

    # expect two edus, in one or two transactions. We don't know what order the
    # devices will be updated.
    self.assertEqual(len(self.edus), 2)
    stream_id = None  # FIXME: there is a discontinuity in the stream IDs: see #7142
    for edu in self.edus:
        self.assertEqual(edu["edu_type"], "m.device_list_update")
        c = edu["content"]
        if stream_id is not None:
            # each update should chain back to the previous stream id
            self.assertEqual(c["prev_id"], [stream_id])
            self.assertGreaterEqual(c["stream_id"], stream_id)
        stream_id = c["stream_id"]
    devices = {edu["content"]["device_id"] for edu in self.edus}
    self.assertEqual({"D1", "D2"}, devices)
async def query_keys(
    self,
    request: SynapseRequest,
    query: JsonDict,
    query_remote_on_cache_miss: bool = False,
) -> None:
    """Handle a notary request for other servers' signing keys.

    Looks each requested (server, key_id) pair up in the local store. A
    cached entry counts as a miss if it is older than the requester's
    ``minimum_valid_until_ts``, or (when no minimum was given) if more
    than half of its validity lifetime has elapsed. On any miss, the
    missing keys are fetched remotely and this method recurses once with
    ``query_remote_on_cache_miss=False``; otherwise every cached key json
    is countersigned with our own signing key(s) and sent as the response.

    Args:
        request: the HTTP request to respond to.
        query: map from server name to a map of key_id to query params
            (may include ``minimum_valid_until_ts``). An empty key_id map
            means "all keys for that server".
        query_remote_on_cache_miss: if True, fetch missing keys from the
            origin servers before responding.
    """
    logger.info("Handling query for keys %r", query)

    store_queries = []
    for server_name, key_ids in query.items():
        # skip servers outside the federation whitelist, if one is configured
        if (self.federation_domain_whitelist is not None
                and server_name not in self.federation_domain_whitelist):
            logger.debug("Federation denied with %s", server_name)
            continue

        # no key_ids means "give me everything you have for this server";
        # a None key_id marks that in the store query
        if not key_ids:
            key_ids = (None, )

        for key_id in key_ids:
            store_queries.append((server_name, key_id, None))

    cached = await self.store.get_server_keys_json(store_queries)

    json_results: Set[bytes] = set()

    time_now_ms = self.clock.time_msec()

    # Note that the value is unused.
    cache_misses: Dict[str, Dict[str, int]] = {}
    for (server_name, key_id, _), key_results in cached.items():
        results = [(result["ts_added_ms"], result) for result in key_results]

        if not results and key_id is not None:
            # nothing cached at all for an explicitly-requested key
            cache_misses.setdefault(server_name, {})[key_id] = 0
            continue

        if key_id is not None:
            # pick the most recently added cached response for this key
            ts_added_ms, most_recent_result = max(results)
            ts_valid_until_ms = most_recent_result["ts_valid_until_ms"]
            req_key = query.get(server_name, {}).get(key_id, {})
            req_valid_until = req_key.get("minimum_valid_until_ts")
            miss = False
            if req_valid_until is not None:
                if ts_valid_until_ms < req_valid_until:
                    logger.debug(
                        "Cached response for %r/%r is older than requested"
                        ": valid_until (%r) < minimum_valid_until (%r)",
                        server_name,
                        key_id,
                        ts_valid_until_ms,
                        req_valid_until,
                    )
                    miss = True
                else:
                    logger.debug(
                        "Cached response for %r/%r is newer than requested"
                        ": valid_until (%r) >= minimum_valid_until (%r)",
                        server_name,
                        key_id,
                        ts_valid_until_ms,
                        req_valid_until,
                    )
            elif (ts_added_ms + ts_valid_until_ms) / 2 < time_now_ms:
                logger.debug(
                    "Cached response for %r/%r is too old"
                    ": (added (%r) + valid_until (%r)) / 2 < now (%r)",
                    server_name,
                    key_id,
                    ts_added_ms,
                    ts_valid_until_ms,
                    time_now_ms,
                )
                # We're more than half way through the lifetime of the
                # response. We should fetch a fresh copy.
                miss = True
            else:
                logger.debug(
                    "Cached response for %r/%r is still valid"
                    ": (added (%r) + valid_until (%r)) / 2 < now (%r)",
                    server_name,
                    key_id,
                    ts_added_ms,
                    ts_valid_until_ms,
                    time_now_ms,
                )

            if miss:
                cache_misses.setdefault(server_name, {})[key_id] = 0
            # Cast to bytes since postgresql returns a memoryview.
            json_results.add(bytes(most_recent_result["key_json"]))
        else:
            # "all keys" request: return every cached response we have
            for _, result in results:
                # Cast to bytes since postgresql returns a memoryview.
                json_results.add(bytes(result["key_json"]))

    # If there is a cache miss, request the missing keys, then recurse (and
    # ensure the result is sent).
    if cache_misses and query_remote_on_cache_miss:
        await yieldable_gather_results(
            lambda t: self.fetcher.get_keys(*t),
            ((server_name, list(keys), 0)
             for server_name, keys in cache_misses.items()),
        )
        # recurse with remote fetching disabled, so we answer from whatever
        # is now cached rather than looping
        await self.query_keys(request, query, query_remote_on_cache_miss=False)
    else:
        signed_keys = []
        for key_json_raw in json_results:
            key_json = json_decoder.decode(key_json_raw.decode("utf-8"))
            # countersign each response with our own key(s) so clients can
            # verify it came via this notary
            for signing_key in self.config.key.key_server_signing_keys:
                key_json = sign_json(key_json, self.config.server.server_name, signing_key)

            signed_keys.append(key_json)

        response = {"server_keys": signed_keys}
        respond_with_json(request, 200, response, canonical_json=True)
def process_v2_response(
    self, from_server, response_json, requested_ids=(),
):
    """Parse a 'Server Keys' structure from the result of a /key request

    This is used to parse either the entirety of the response from
    GET /_matrix/key/v2/server, or a single entry from the list returned by
    POST /_matrix/key/v2/query.

    Checks that each signature in the response that claims to come
    from the origin server is valid. (Does not check that there
    actually is such a signature, for some reason.)

    Stores the json in server_keys_json so that it can be used for future
    responses to /_matrix/key/v2/query.

    Args:
        from_server (str): the name of the server producing this result:
            either the origin server for a /_matrix/key/v2/server request,
            or the notary for a /_matrix/key/v2/query.

        response_json (dict): the json-decoded Server Keys response object

        requested_ids (iterable[str]): a list of the key IDs that were
            requested. We will store the json for these key ids as well as
            any that are actually in the response. (The default is an empty
            tuple rather than a mutable list, to avoid the shared
            mutable-default pitfall.)

    Returns:
        Deferred[dict[str, nacl.signing.VerifyKey]]:
            map from key_id to key object

    Raises:
        KeyLookupError: if the response signs with a key it does not
            include a verification key for.
    """
    time_now_ms = self.clock.time_msec()
    response_keys = {}

    # decode the currently-valid verification keys, recording when we
    # first saw each of them
    verify_keys = {}
    for key_id, key_data in response_json["verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.time_added = time_now_ms
            verify_keys[key_id] = verify_key

    # ditto the expired keys, which additionally carry their expiry time
    old_verify_keys = {}
    for key_id, key_data in response_json["old_verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.expired = key_data["expired_ts"]
            verify_key.time_added = time_now_ms
            old_verify_keys[key_id] = verify_key

    server_name = response_json["server_name"]
    # check each signature that claims to come from the origin server
    for key_id in response_json["signatures"].get(server_name, {}):
        if key_id not in response_json["verify_keys"]:
            raise KeyLookupError(
                "Key response must include verification keys for all"
                " signatures"
            )
        if key_id in verify_keys:
            verify_signed_json(
                response_json,
                server_name,
                verify_keys[key_id]
            )

    # re-sign the response with our own key so that it can be served to
    # other servers that query us as a notary
    signed_key_json = sign_json(
        response_json,
        self.config.server_name,
        self.config.signing_key[0],
    )

    signed_key_json_bytes = encode_canonical_json(signed_key_json)
    ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

    # store the signed json for every key id that was requested, plus any
    # that actually appeared in the response
    updated_key_ids = set(requested_ids)
    updated_key_ids.update(verify_keys)
    updated_key_ids.update(old_verify_keys)

    response_keys.update(verify_keys)
    response_keys.update(old_verify_keys)

    yield logcontext.make_deferred_yieldable(
        defer.gatherResults(
            [
                run_in_background(
                    self.store.store_server_keys_json,
                    server_name=server_name,
                    key_id=key_id,
                    from_server=from_server,
                    ts_now_ms=time_now_ms,
                    ts_expires_ms=ts_valid_until_ms,
                    key_json_bytes=signed_key_json_bytes,
                )
                for key_id in updated_key_ids
            ],
            consumeErrors=True,
        ).addErrback(unwrapFirstError))

    defer.returnValue(response_keys)
def test_upload_signatures(self) -> None:
    """should check signatures that are uploaded

    Sets up a local user with a device and full cross-signing keys, plus a
    second user with only a master key, then uploads a batch of deliberately
    broken signatures (expecting INVALID_SIGNATURE / NOT_FOUND / UNKNOWN
    failures) followed by a batch of valid ones, and finally verifies the
    stored signatures via query_devices.
    """
    # set up a user with cross-signing keys and a device. This user will
    # try uploading signatures
    local_user = "******" + self.hs.hostname
    device_id = "xyz"
    # private key: OMkooTr76ega06xNvXIGPbgvvxAOzmQncN8VObS7aBA
    device_pubkey = "NnHhnqiMFQkq969szYkooLaBAXW244ZOxgukCvm2ZeY"
    device_key = {
        "user_id": local_user,
        "device_id": device_id,
        "algorithms": [
            "m.olm.curve25519-aes-sha2",
            RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
        ],
        "keys": {
            "curve25519:xyz": "curve25519+key",
            "ed25519:xyz": device_pubkey
        },
        "signatures": {
            local_user: {
                "ed25519:xyz": "something"
            }
        },
    }
    device_signing_key = key.decode_signing_key_base64(
        "ed25519", "xyz", "OMkooTr76ega06xNvXIGPbgvvxAOzmQncN8VObS7aBA")
    self.get_success(
        self.handler.upload_keys_for_user(local_user, device_id,
                                          {"device_keys": device_key}))

    # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
    master_pubkey = "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
    master_key = {
        "user_id": local_user,
        "usage": ["master"],
        "keys": {
            "ed25519:" + master_pubkey: master_pubkey
        },
    }
    master_signing_key = key.decode_signing_key_base64(
        "ed25519", master_pubkey,
        "2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0")
    usersigning_pubkey = "Hq6gL+utB4ET+UvD5ci0kgAwsX6qP/zvf8v6OInU5iw"
    usersigning_key = {
        # private key: 4TL4AjRYwDVwD3pqQzcor+ez/euOB1/q78aTJ+czDNs
        "user_id": local_user,
        "usage": ["user_signing"],
        "keys": {
            "ed25519:" + usersigning_pubkey: usersigning_pubkey
        },
    }
    usersigning_signing_key = key.decode_signing_key_base64(
        "ed25519", usersigning_pubkey,
        "4TL4AjRYwDVwD3pqQzcor+ez/euOB1/q78aTJ+czDNs")
    # the user-signing key must be signed by the master key
    sign.sign_json(usersigning_key, local_user, master_signing_key)
    # private key: HvQBbU+hc2Zr+JP1sE0XwBe1pfZZEYtJNPJLZJtS+F8
    selfsigning_pubkey = "EmkqvokUn8p+vQAGZitOk4PWjp7Ukp3txV2TbMPEiBQ"
    selfsigning_key = {
        "user_id": local_user,
        "usage": ["self_signing"],
        "keys": {
            "ed25519:" + selfsigning_pubkey: selfsigning_pubkey
        },
    }
    selfsigning_signing_key = key.decode_signing_key_base64(
        "ed25519", selfsigning_pubkey,
        "HvQBbU+hc2Zr+JP1sE0XwBe1pfZZEYtJNPJLZJtS+F8")
    # the self-signing key must also be signed by the master key
    sign.sign_json(selfsigning_key, local_user, master_signing_key)
    cross_signing_keys = {
        "master_key": master_key,
        "user_signing_key": usersigning_key,
        "self_signing_key": selfsigning_key,
    }
    self.get_success(
        self.handler.upload_signing_keys_for_user(local_user,
                                                  cross_signing_keys))

    # set up another user with a master key. This user will be signed by
    # the first user
    other_user = "******" + self.hs.hostname
    other_master_pubkey = "fHZ3NPiKxoLQm5OoZbKa99SYxprOjNs4TwJUKP+twCM"
    other_master_key = {
        # private key: oyw2ZUx0O4GifbfFYM0nQvj9CL0b8B7cyN4FprtK8OI
        "user_id": other_user,
        "usage": ["master"],
        "keys": {
            "ed25519:" + other_master_pubkey: other_master_pubkey
        },
    }
    self.get_success(
        self.handler.upload_signing_keys_for_user(
            other_user, {"master_key": other_master_key}))

    # test various signature failures (see below)
    ret = self.get_success(
        self.handler.upload_signatures_for_device_keys(
            local_user,
            {
                local_user: {
                    # fails because the signature is invalid
                    # should fail with INVALID_SIGNATURE
                    device_id: {
                        "user_id": local_user,
                        "device_id": device_id,
                        "algorithms": [
                            "m.olm.curve25519-aes-sha2",
                            RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2,
                        ],
                        "keys": {
                            "curve25519:xyz": "curve25519+key",
                            # private key: OMkooTr76ega06xNvXIGPbgvvxAOzmQncN8VObS7aBA
                            "ed25519:xyz": device_pubkey,
                        },
                        "signatures": {
                            local_user: {
                                "ed25519:" + selfsigning_pubkey: "something"
                            }
                        },
                    },
                    # fails because device is unknown
                    # should fail with NOT_FOUND
                    "unknown": {
                        "user_id": local_user,
                        "device_id": "unknown",
                        "signatures": {
                            local_user: {
                                "ed25519:" + selfsigning_pubkey: "something"
                            }
                        },
                    },
                    # fails because the signature is invalid
                    # should fail with INVALID_SIGNATURE
                    master_pubkey: {
                        "user_id": local_user,
                        "usage": ["master"],
                        "keys": {
                            "ed25519:" + master_pubkey: master_pubkey
                        },
                        "signatures": {
                            local_user: {
                                "ed25519:" + device_pubkey: "something"
                            }
                        },
                    },
                },
                other_user: {
                    # fails because the device is not the user's master-signing key
                    # should fail with NOT_FOUND
                    "unknown": {
                        "user_id": other_user,
                        "device_id": "unknown",
                        "signatures": {
                            local_user: {
                                "ed25519:" + usersigning_pubkey: "something"
                            }
                        },
                    },
                    other_master_pubkey: {
                        # fails because the key doesn't match what the server has
                        # should fail with UNKNOWN
                        "user_id": other_user,
                        "usage": ["master"],
                        "keys": {
                            "ed25519:" + other_master_pubkey: other_master_pubkey
                        },
                        "something": "random",
                        "signatures": {
                            local_user: {
                                "ed25519:" + usersigning_pubkey: "something"
                            }
                        },
                    },
                },
            },
        ))

    # check that each bad upload failed with the expected error code
    user_failures = ret["failures"][local_user]
    self.assertEqual(user_failures[device_id]["errcode"],
                     Codes.INVALID_SIGNATURE)
    self.assertEqual(user_failures[master_pubkey]["errcode"],
                     Codes.INVALID_SIGNATURE)
    self.assertEqual(user_failures["unknown"]["errcode"], Codes.NOT_FOUND)

    other_user_failures = ret["failures"][other_user]
    self.assertEqual(other_user_failures["unknown"]["errcode"],
                     Codes.NOT_FOUND)
    self.assertEqual(other_user_failures[other_master_pubkey]["errcode"],
                     Codes.UNKNOWN)

    # test successful signatures
    del device_key["signatures"]
    sign.sign_json(device_key, local_user, selfsigning_signing_key)
    sign.sign_json(master_key, local_user, device_signing_key)
    sign.sign_json(other_master_key, local_user, usersigning_signing_key)
    ret = self.get_success(
        self.handler.upload_signatures_for_device_keys(
            local_user,
            {
                local_user: {
                    device_id: device_key,
                    master_pubkey: master_key
                },
                other_user: {
                    other_master_pubkey: other_master_key
                },
            },
        ))
    self.assertEqual(ret["failures"], {})

    # fetch the signed keys/devices and make sure that the signatures are there
    ret = self.get_success(
        self.handler.query_devices(
            {"device_keys": {
                local_user: [],
                other_user: []
            }},
            0,
            local_user,
            "device123",
        ))

    self.assertEqual(
        ret["device_keys"][local_user]["xyz"]["signatures"][local_user][
            "ed25519:" + selfsigning_pubkey],
        device_key["signatures"][local_user]["ed25519:" + selfsigning_pubkey],
    )
    self.assertEqual(
        ret["master_keys"][local_user]["signatures"][local_user][
            "ed25519:" + device_id],
        master_key["signatures"][local_user]["ed25519:" + device_id],
    )
    self.assertEqual(
        ret["master_keys"][other_user]["signatures"][local_user][
            "ed25519:" + usersigning_pubkey],
        other_master_key["signatures"][local_user]["ed25519:" +
                                                   usersigning_pubkey],
    )
def process_v2_response(self, from_server, response_json,
                        requested_ids=(), only_from_server=True):
    """Parse, verify and store a 'Server Keys' response.

    Decodes the current and expired verification keys from the response,
    checks every signature in the response that claims to come from the
    origin server, re-signs the response with our own signing key, and
    stores the signed json for each relevant key id.

    Args:
        from_server (str): the name of the server the response was
            received from.
        response_json (dict): the json-decoded Server Keys response object.
        requested_ids (iterable[str]): key IDs that were requested; the
            json is stored for these as well as any key ids actually in
            the response. (An immutable default is used so the default
            cannot be accidentally shared between calls.)
        only_from_server (bool): if True, require that the server_name in
            the response matches from_server.

    Returns:
        Deferred[dict[str, dict]]: map from server name to a map from
            key_id to verify-key object.

    Raises:
        ValueError: if the response is for an unexpected server, or if a
            signature lacks a corresponding verification key.
    """
    time_now_ms = self.clock.time_msec()
    response_keys = {}

    # decode the currently-valid verification keys, recording when we
    # first saw each of them
    verify_keys = {}
    for key_id, key_data in response_json["verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.time_added = time_now_ms
            verify_keys[key_id] = verify_key

    # ditto the expired keys, which additionally carry their expiry time
    old_verify_keys = {}
    for key_id, key_data in response_json["old_verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.expired = key_data["expired_ts"]
            verify_key.time_added = time_now_ms
            old_verify_keys[key_id] = verify_key

    results = {}
    server_name = response_json["server_name"]
    if only_from_server:
        if server_name != from_server:
            raise ValueError(
                "Expected a response for server %r not %r" % (
                    from_server, server_name
                )
            )
    # check each signature that claims to come from the origin server
    for key_id in response_json["signatures"].get(server_name, {}):
        if key_id not in response_json["verify_keys"]:
            raise ValueError(
                "Key response must include verification keys for all"
                " signatures"
            )
        if key_id in verify_keys:
            verify_signed_json(
                response_json,
                server_name,
                verify_keys[key_id]
            )

    # re-sign the response with our own key so that it can be served to
    # other servers that query us as a notary
    signed_key_json = sign_json(
        response_json,
        self.config.server_name,
        self.config.signing_key[0],
    )

    signed_key_json_bytes = encode_canonical_json(signed_key_json)
    ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

    # store the signed json for every key id that was requested, plus any
    # that actually appeared in the response
    updated_key_ids = set(requested_ids)
    updated_key_ids.update(verify_keys)
    updated_key_ids.update(old_verify_keys)

    response_keys.update(verify_keys)
    response_keys.update(old_verify_keys)

    yield defer.gatherResults(
        [
            preserve_fn(self.store.store_server_keys_json)(
                server_name=server_name,
                key_id=key_id,
                # BUGFIX: record the server we actually received the
                # response from (was `server_name`, which is only correct
                # when only_from_server is True)
                from_server=from_server,
                ts_now_ms=time_now_ms,
                ts_expires_ms=ts_valid_until_ms,
                key_json_bytes=signed_key_json_bytes,
            )
            for key_id in updated_key_ids
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError)

    results[server_name] = response_keys

    defer.returnValue(results)
def process_v2_response(self, from_server, response_json,
                        requested_ids=(), only_from_server=True):
    """Parse, verify and store a 'Server Keys' response.

    Decodes the current and expired verification keys from the response,
    checks every signature in the response that claims to come from the
    origin server, re-signs the response with our own signing key, and
    stores the signed json for each relevant key id.

    Args:
        from_server (str): the name of the server the response was
            received from.
        response_json (dict): the json-decoded Server Keys response object.
        requested_ids (iterable[str]): key IDs that were requested; the
            json is stored for these as well as any key ids actually in
            the response. (An immutable default is used so the default
            cannot be accidentally shared between calls.)
        only_from_server (bool): if True, require that the server_name in
            the response matches from_server.

    Returns:
        Deferred[dict[str, dict]]: map from server name to a map from
            key_id to verify-key object.

    Raises:
        KeyLookupError: if the response is for an unexpected server, or if
            a signature lacks a corresponding verification key.
    """
    time_now_ms = self.clock.time_msec()
    response_keys = {}

    # decode the currently-valid verification keys, recording when we
    # first saw each of them
    verify_keys = {}
    for key_id, key_data in response_json["verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.time_added = time_now_ms
            verify_keys[key_id] = verify_key

    # ditto the expired keys, which additionally carry their expiry time
    old_verify_keys = {}
    for key_id, key_data in response_json["old_verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.expired = key_data["expired_ts"]
            verify_key.time_added = time_now_ms
            old_verify_keys[key_id] = verify_key

    results = {}
    server_name = response_json["server_name"]
    if only_from_server:
        if server_name != from_server:
            raise KeyLookupError(
                "Expected a response for server %r not %r" % (
                    from_server, server_name
                )
            )
    # check each signature that claims to come from the origin server
    for key_id in response_json["signatures"].get(server_name, {}):
        if key_id not in response_json["verify_keys"]:
            raise KeyLookupError(
                "Key response must include verification keys for all"
                " signatures"
            )
        if key_id in verify_keys:
            verify_signed_json(
                response_json,
                server_name,
                verify_keys[key_id]
            )

    # re-sign the response with our own key so that it can be served to
    # other servers that query us as a notary
    signed_key_json = sign_json(
        response_json,
        self.config.server_name,
        self.config.signing_key[0],
    )

    signed_key_json_bytes = encode_canonical_json(signed_key_json)
    ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

    # store the signed json for every key id that was requested, plus any
    # that actually appeared in the response
    updated_key_ids = set(requested_ids)
    updated_key_ids.update(verify_keys)
    updated_key_ids.update(old_verify_keys)

    response_keys.update(verify_keys)
    response_keys.update(old_verify_keys)

    yield logcontext.make_deferred_yieldable(defer.gatherResults(
        [
            run_in_background(
                self.store.store_server_keys_json,
                server_name=server_name,
                key_id=key_id,
                # BUGFIX: record the server we actually received the
                # response from (was `server_name`, which is only correct
                # when only_from_server is True)
                from_server=from_server,
                ts_now_ms=time_now_ms,
                ts_expires_ms=ts_valid_until_ms,
                key_json_bytes=signed_key_json_bytes,
            )
            for key_id in updated_key_ids
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError))

    results[server_name] = response_keys

    defer.returnValue(results)